diff --git a/.github/actions/qt-android/action.yml b/.github/actions/qt-android/action.yml index dbc1ca3a988f..740bbd353667 100644 --- a/.github/actions/qt-android/action.yml +++ b/.github/actions/qt-android/action.yml @@ -77,7 +77,7 @@ runs: target: desktop arch: ${{ inputs.arch }} dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true @@ -90,7 +90,7 @@ runs: target: android arch: android_arm64_v8a dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true @@ -103,7 +103,7 @@ runs: target: android arch: android_armv7 dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true @@ -116,7 +116,7 @@ runs: target: android arch: android_x86_64 dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true @@ -129,6 +129,6 @@ runs: target: android arch: android_x86 dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true diff --git a/.github/workflows/custom.yml b/.github/workflows/custom.yml index d1beea4eadcc..639475866af4 100644 --- a/.github/workflows/custom.yml +++ b/.github/workflows/custom.yml @@ -74,7 +74,7 @@ jobs: target: desktop arch: win64_msvc2022_64 dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true diff --git a/.github/workflows/ios.yml b/.github/workflows/ios.yml index 0c9bfc3fd265..38fb36315223 100644 --- a/.github/workflows/ios.yml +++ b/.github/workflows/ios.yml @@ -56,7 +56,7 @@ jobs: target: desktop arch: clang_64 dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + 
modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true @@ -68,7 +68,7 @@ jobs: target: ios arch: ios dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtwebsockets cache: true - name: Configure diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 34f39fd36c59..311cfd37109e 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -88,7 +88,7 @@ jobs: target: desktop arch: ${{ matrix.arch }} dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index ec105016afaa..7fe73d443911 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -72,7 +72,7 @@ jobs: target: desktop arch: clang_64 dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 4a52f27b8a8a..ccf914a4482b 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -93,7 +93,7 @@ jobs: target: desktop arch: ${{ matrix.arch }} dir: ${{ runner.temp }} - modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml + modules: qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets setup-python: false cache: true diff --git a/.gitignore b/.gitignore index b4f4b6b9fad3..bec53d31cb88 100644 --- a/.gitignore +++ b/.gitignore @@ -77,6 +77,12 @@ ui_* android/local.properties *.class +# python +__pycache__/ +*.py[cod] +*$py.class +*.pyc + # doxygen src/html/ src/latex/ diff --git a/CMakeLists.txt b/CMakeLists.txt index 5166adfaf7b2..9d7f9db8d846 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -176,6 +176,7 @@ find_package(Qt6 Sql Svg TextToSpeech + WebSockets Widgets Xml StateMachine diff --git a/cmake/find-modules/FindGStreamer.cmake b/cmake/find-modules/FindGStreamer.cmake index af3a1c208231..9749edfc7f92 100644 --- a/cmake/find-modules/FindGStreamer.cmake +++ b/cmake/find-modules/FindGStreamer.cmake @@ -415,6 +415,10 @@ find_gstreamer_component(Rtsp gstreamer-rtsp-1.0) # ---------------------------------------------------------------------------- # Find Optional Components (Based on FIND_COMPONENTS) # ---------------------------------------------------------------------------- +if(App IN_LIST GStreamer_FIND_COMPONENTS) + 
find_gstreamer_component(App gstreamer-app-1.0) +endif() + if(GlEgl IN_LIST GStreamer_FIND_COMPONENTS) find_gstreamer_component(GlEgl gstreamer-gl-egl-1.0) endif() diff --git a/deploy/vagrant/Vagrantfile b/deploy/vagrant/Vagrantfile index 8f4a97846c56..e8e777e5930c 100644 --- a/deploy/vagrant/Vagrantfile +++ b/deploy/vagrant/Vagrantfile @@ -94,7 +94,7 @@ Vagrant.configure(2) do |config| version="6.10.0" host="linux" target="desktop" - modules="qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml" + modules="qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets" su - vagrant -c "rm -rf ${dir}" su - vagrant -c "mkdir -p ${dir}" su - vagrant -c "python3 -m aqt install-qt -O ${dir} ${host} ${target} ${version} -m ${modules}" diff --git a/src/Settings/Video.SettingsGroup.json b/src/Settings/Video.SettingsGroup.json index aff4a24c8ac1..c50ba8424b0b 100644 --- a/src/Settings/Video.SettingsGroup.json +++ b/src/Settings/Video.SettingsGroup.json @@ -31,6 +31,129 @@ "type": "string", "default": "" }, +{ + "name": "httpUrl", + "shortDesc": "HTTP Video URL", + "longDesc": "HTTP/HTTPS URL for MJPEG video stream. Supports any HTTP MJPEG source (e.g., PixEagle drone, IP cameras, test servers). Format: http://[host]:[port]/[path]. Example: http://127.0.0.1:5077/video_feed for local testing, or http://[camera-ip]/video_feed for network cameras. See test/VideoStreaming/ for test servers.", + "type": "string", + "default": "http://127.0.0.1:5077/video_feed" +}, +{ + "name": "httpTimeout", + "shortDesc": "HTTP Stream Timeout", + "longDesc": "Connection timeout for HTTP video streams in seconds.", + "type": "uint32", + "min": 5, + "max": 60, + "units": "s", + "default": 10 +}, +{ + "name": "httpRetryAttempts", + "shortDesc": "HTTP Retry Attempts", + "longDesc": "Number of automatic retry attempts when HTTP stream connection fails.", + "type": "uint32", + "min": 0, + "max": 10, + "default": 3 +}, +{ + "name": "httpBufferSize", + "shortDesc": "HTTP Buffer Size", + "longDesc": "Size of HTTP read buffer in bytes. Larger values may improve throughput on high-bandwidth connections.", + "type": "uint32", + "min": 1024, + "max": 1048576, + "units": "bytes", + "default": 32768 +}, +{ + "name": "httpKeepAlive", + "shortDesc": "HTTP Keep-Alive", + "longDesc": "Enable HTTP keep-alive for persistent connections. Recommended for better performance.", + "type": "bool", + "default": true +}, +{ + "name": "httpUserAgent", + "shortDesc": "HTTP User Agent", + "longDesc": "User-Agent string sent with HTTP requests. Used for server-side logging and identification.", + "type": "string", + "default": "QGroundControl/4.x" +}, +{ + "name": "websocketUrl", + "shortDesc": "WebSocket Video URL", + "longDesc": "WebSocket URL for video stream. Supports any WebSocket video source that sends JPEG frames with metadata (e.g., PixEagle drone simulator/hardware, test servers). Format: ws://[host]:[port]/[path]. Example: ws://127.0.0.1:5077/ws/video_feed for local testing, or ws://[drone-ip]:8000/ws/video_feed for remote drones. 
See test/VideoStreaming/ for test servers.", + "type": "string", + "default": "ws://127.0.0.1:5077/ws/video_feed" +}, +{ + "name": "websocketTimeout", + "shortDesc": "WebSocket Timeout", + "longDesc": "Connection timeout for WebSocket video streams in seconds.", + "type": "uint32", + "min": 5, + "max": 60, + "units": "s", + "default": 10 +}, +{ + "name": "websocketReconnectDelay", + "shortDesc": "WebSocket Reconnect Delay", + "longDesc": "Delay in milliseconds before attempting to reconnect after WebSocket disconnection.", + "type": "uint32", + "min": 500, + "max": 10000, + "units": "ms", + "default": 2000 +}, +{ + "name": "websocketHeartbeat", + "shortDesc": "WebSocket Heartbeat Interval", + "longDesc": "Interval in milliseconds for sending heartbeat/ping messages to keep WebSocket connection alive.", + "type": "uint32", + "min": 1000, + "max": 30000, + "units": "ms", + "default": 5000 +}, +{ + "name": "adaptiveQuality", + "shortDesc": "Enable Adaptive Quality", + "longDesc": "Automatically adjust video quality based on network conditions. Only applies to WebSocket streams.", + "type": "bool", + "default": true +}, +{ + "name": "minQuality", + "shortDesc": "Minimum Video Quality", + "longDesc": "Minimum JPEG quality percentage for adaptive quality control. Lower values reduce bandwidth but decrease image quality.", + "type": "uint32", + "min": 1, + "max": 100, + "units": "%", + "default": 60 +}, +{ + "name": "maxQuality", + "shortDesc": "Maximum Video Quality", + "longDesc": "Maximum JPEG quality percentage for adaptive quality control. Higher values improve image quality but increase bandwidth usage.", + "type": "uint32", + "min": 1, + "max": 100, + "units": "%", + "default": 95 +}, +{ + "name": "websocketBufferFrames", + "shortDesc": "WebSocket Buffer Frames", + "longDesc": "Number of frames to buffer in WebSocket receive queue. 
Lower values reduce latency, higher values improve stability.", + "type": "uint32", + "min": 1, + "max": 10, + "default": 3 +}, { "name": "videoSavePath", "shortDesc": "Video save directory", @@ -60,7 +183,7 @@ "type": "uint32", "enumStrings": "Fit Width,Fit Height,Fill,No Crop", "enumValues": "0,1,2,3", - "default": 1 + "default": 0 }, { "name": "showRecControl", diff --git a/src/Settings/VideoSettings.cc b/src/Settings/VideoSettings.cc index 084afafa4a64..4ace1d72e589 100644 --- a/src/Settings/VideoSettings.cc +++ b/src/Settings/VideoSettings.cc @@ -29,6 +29,8 @@ DECLARE_SETTINGGROUP(Video, "Video") videoSourceList.append(videoSourceUDPH265); videoSourceList.append(videoSourceTCP); videoSourceList.append(videoSourceMPEGTS); + videoSourceList.append(videoSourceHTTP); + videoSourceList.append(videoSourceWebSocket); videoSourceList.append(videoSource3DRSolo); videoSourceList.append(videoSourceParrotDiscovery); videoSourceList.append(videoSourceYuneecMantisG); @@ -184,6 +186,98 @@ DECLARE_SETTINGSFACT_NO_FUNC(VideoSettings, tcpUrl) return _tcpUrlFact; } +DECLARE_SETTINGSFACT_NO_FUNC(VideoSettings, httpUrl) +{ + if (!_httpUrlFact) { + _httpUrlFact = _createSettingsFact(httpUrlName); + connect(_httpUrlFact, &Fact::valueChanged, this, &VideoSettings::_configChanged); + } + return _httpUrlFact; +} + +DECLARE_SETTINGSFACT_NO_FUNC(VideoSettings, httpTimeout) +{ + if (!_httpTimeoutFact) { + _httpTimeoutFact = _createSettingsFact(httpTimeoutName); + _httpTimeoutFact->setVisible( +#ifdef QGC_GST_STREAMING + true +#else + false +#endif + ); + connect(_httpTimeoutFact, &Fact::valueChanged, this, &VideoSettings::_configChanged); + } + return _httpTimeoutFact; +} + +DECLARE_SETTINGSFACT_NO_FUNC(VideoSettings, httpRetryAttempts) +{ + if (!_httpRetryAttemptsFact) { + _httpRetryAttemptsFact = _createSettingsFact(httpRetryAttemptsName); + _httpRetryAttemptsFact->setVisible( +#ifdef QGC_GST_STREAMING + true +#else + false +#endif + ); + connect(_httpRetryAttemptsFact, &Fact::valueChanged, this, &VideoSettings::_configChanged); + } + return _httpRetryAttemptsFact; +} + +DECLARE_SETTINGSFACT_NO_FUNC(VideoSettings, httpBufferSize) +{ + if (!_httpBufferSizeFact) { + _httpBufferSizeFact = _createSettingsFact(httpBufferSizeName); + _httpBufferSizeFact->setVisible( +#ifdef QGC_GST_STREAMING + true +#else + false +#endif + ); + connect(_httpBufferSizeFact, &Fact::valueChanged, this, &VideoSettings::_configChanged); + } + return _httpBufferSizeFact; +} + +DECLARE_SETTINGSFACT(VideoSettings, httpKeepAlive) +DECLARE_SETTINGSFACT(VideoSettings, httpUserAgent) + +DECLARE_SETTINGSFACT_NO_FUNC(VideoSettings, websocketUrl) +{ + if (!_websocketUrlFact) { + _websocketUrlFact = _createSettingsFact(websocketUrlName); + connect(_websocketUrlFact, &Fact::valueChanged, this, &VideoSettings::_configChanged); + } + return _websocketUrlFact; +} + +DECLARE_SETTINGSFACT_NO_FUNC(VideoSettings, websocketTimeout) +{ + if (!_websocketTimeoutFact) { + _websocketTimeoutFact = _createSettingsFact(websocketTimeoutName); + _websocketTimeoutFact->setVisible( +#ifdef QGC_GST_STREAMING + true +#else + false +#endif + ); + connect(_websocketTimeoutFact, &Fact::valueChanged, this, &VideoSettings::_configChanged); + } + return _websocketTimeoutFact; +} + +DECLARE_SETTINGSFACT(VideoSettings, websocketReconnectDelay) +DECLARE_SETTINGSFACT(VideoSettings, websocketHeartbeat) +DECLARE_SETTINGSFACT(VideoSettings, adaptiveQuality) +DECLARE_SETTINGSFACT(VideoSettings, minQuality) +DECLARE_SETTINGSFACT(VideoSettings, maxQuality) 
+DECLARE_SETTINGSFACT(VideoSettings, websocketBufferFrames) + bool VideoSettings::streamConfigured(void) { //-- First, check if it's autoconfigured @@ -216,6 +310,16 @@ bool VideoSettings::streamConfigured(void) qCDebug(VideoManagerLog) << "Testing configuration for MPEG-TS Stream:" << udpUrl()->rawValue().toString(); return !udpUrl()->rawValue().toString().isEmpty(); } + //-- If HTTP, check for URL + if(vSource == videoSourceHTTP) { + qCDebug(VideoManagerLog) << "Testing configuration for HTTP Stream:" << httpUrl()->rawValue().toString(); + return !httpUrl()->rawValue().toString().isEmpty(); + } + //-- If WebSocket, check for URL + if(vSource == videoSourceWebSocket) { + qCDebug(VideoManagerLog) << "Testing configuration for WebSocket Stream:" << websocketUrl()->rawValue().toString(); + return !websocketUrl()->rawValue().toString().isEmpty(); + } //-- If Herelink Air unit, good to go if(vSource == videoSourceHerelinkAirUnit) { qCDebug(VideoManagerLog) << "Stream configured for Herelink Air Unit"; diff --git a/src/Settings/VideoSettings.h b/src/Settings/VideoSettings.h index 8c0ad3a081bb..fbfec3824715 100644 --- a/src/Settings/VideoSettings.h +++ b/src/Settings/VideoSettings.h @@ -26,6 +26,20 @@ class VideoSettings : public SettingsGroup DEFINE_SETTINGFACT(udpUrl) DEFINE_SETTINGFACT(tcpUrl) DEFINE_SETTINGFACT(rtspUrl) + DEFINE_SETTINGFACT(httpUrl) + DEFINE_SETTINGFACT(httpTimeout) + DEFINE_SETTINGFACT(httpRetryAttempts) + DEFINE_SETTINGFACT(httpBufferSize) + DEFINE_SETTINGFACT(httpKeepAlive) + DEFINE_SETTINGFACT(httpUserAgent) + DEFINE_SETTINGFACT(websocketUrl) + DEFINE_SETTINGFACT(websocketTimeout) + DEFINE_SETTINGFACT(websocketReconnectDelay) + DEFINE_SETTINGFACT(websocketHeartbeat) + DEFINE_SETTINGFACT(adaptiveQuality) + DEFINE_SETTINGFACT(minQuality) + DEFINE_SETTINGFACT(maxQuality) + DEFINE_SETTINGFACT(websocketBufferFrames) DEFINE_SETTINGFACT(aspectRatio) DEFINE_SETTINGFACT(videoFit) DEFINE_SETTINGFACT(gridLines) @@ -45,6 +59,8 @@ class VideoSettings : public SettingsGroup Q_PROPERTY(QString udp265VideoSource READ udp265VideoSource CONSTANT) Q_PROPERTY(QString tcpVideoSource READ tcpVideoSource CONSTANT) Q_PROPERTY(QString mpegtsVideoSource READ mpegtsVideoSource CONSTANT) + Q_PROPERTY(QString httpVideoSource READ httpVideoSource CONSTANT) + Q_PROPERTY(QString websocketVideoSource READ websocketVideoSource CONSTANT) Q_PROPERTY(QString disabledVideoSource READ disabledVideoSource CONSTANT) bool streamConfigured (); @@ -53,6 +69,8 @@ class VideoSettings : public SettingsGroup QString udp265VideoSource () { return videoSourceUDPH265; } QString tcpVideoSource () { return videoSourceTCP; } QString mpegtsVideoSource () { return videoSourceMPEGTS; } + QString httpVideoSource () { return videoSourceHTTP; } + QString websocketVideoSource () { return videoSourceWebSocket; } QString disabledVideoSource () { return videoDisabled; } static constexpr const char* videoSourceNoVideo = QT_TRANSLATE_NOOP("VideoSettings", "No Video Available"); @@ -62,6 +80,8 @@ class VideoSettings : public SettingsGroup static constexpr const char* videoSourceUDPH265 = QT_TRANSLATE_NOOP("VideoSettings", "UDP h.265 Video Stream"); static constexpr const char* videoSourceTCP = QT_TRANSLATE_NOOP("VideoSettings", "TCP-MPEG2 Video Stream"); static constexpr const char* videoSourceMPEGTS = QT_TRANSLATE_NOOP("VideoSettings", "MPEG-TS Video Stream"); + static constexpr const char* videoSourceHTTP = QT_TRANSLATE_NOOP("VideoSettings", "HTTP MJPEG Stream"); + static constexpr const char* videoSourceWebSocket = 
QT_TRANSLATE_NOOP("VideoSettings", "WebSocket Video Stream"); static constexpr const char* videoSource3DRSolo = QT_TRANSLATE_NOOP("VideoSettings", "3DR Solo (requires restart)"); static constexpr const char* videoSourceParrotDiscovery = QT_TRANSLATE_NOOP("VideoSettings", "Parrot Discovery"); static constexpr const char* videoSourceYuneecMantisG = QT_TRANSLATE_NOOP("VideoSettings", "Yuneec Mantis G"); diff --git a/src/UI/AppSettings/VideoSettings.qml b/src/UI/AppSettings/VideoSettings.qml index cf8f8843ce7a..1a7079835b2f 100644 --- a/src/UI/AppSettings/VideoSettings.qml +++ b/src/UI/AppSettings/VideoSettings.qml @@ -30,6 +30,8 @@ SettingsPage { property bool _isRTSP: _isStreamSource && (_videoSource === _videoSettings.rtspVideoSource) property bool _isTCP: _isStreamSource && (_videoSource === _videoSettings.tcpVideoSource) property bool _isMPEGTS: _isStreamSource && (_videoSource === _videoSettings.mpegtsVideoSource) + property bool _isHTTP: _isStreamSource && (_videoSource === _videoSettings.httpVideoSource) + property bool _isWebSocket: _isStreamSource && (_videoSource === _videoSettings.websocketVideoSource) property bool _videoAutoStreamConfig: _videoManager.autoStreamConfigured property bool _videoSourceDisabled: _videoSource === _videoSettings.disabledVideoSource property real _urlFieldWidth: ScreenTools.defaultFontPixelWidth * 40 @@ -53,7 +55,7 @@ SettingsPage { SettingsGroupLayout { Layout.fillWidth: true heading: qsTr("Connection") - visible: !_videoSourceDisabled && !_videoAutoStreamConfig && (_isTCP || _isRTSP | _requiresUDPUrl) + visible: !_videoSourceDisabled && !_videoAutoStreamConfig && (_isTCP || _isRTSP || _requiresUDPUrl || _isHTTP || _isWebSocket) LabelledFactTextField { Layout.fillWidth: true @@ -78,6 +80,22 @@ SettingsPage { fact: _videoSettings.udpUrl visible: _requiresUDPUrl && _videoSettings.udpUrl.visible } + + LabelledFactTextField { + Layout.fillWidth: true + textFieldPreferredWidth: _urlFieldWidth + label: qsTr("HTTP URL") + fact: _videoSettings.httpUrl + visible: _isHTTP && _videoSettings.httpUrl.visible + } + + LabelledFactTextField { + Layout.fillWidth: true + textFieldPreferredWidth: _urlFieldWidth + label: qsTr("WebSocket URL") + fact: _videoSettings.websocketUrl + visible: _isWebSocket && _videoSettings.websocketUrl.visible + } } SettingsGroupLayout { @@ -85,6 +103,14 @@ SettingsPage { heading: qsTr("Settings") visible: !_videoSourceDisabled + LabelledFactComboBox { + Layout.fillWidth: true + label: qsTr("Video Display Fit") + fact: _videoSettings.videoFit + visible: !_videoAutoStreamConfig && _isStreamSource && fact.visible + indexModel: false + } + LabelledFactTextField { Layout.fillWidth: true label: qsTr("Aspect Ratio") @@ -115,6 +141,104 @@ SettingsPage { } } + SettingsGroupLayout { + Layout.fillWidth: true + heading: qsTr("HTTP Network Optimization") + visible: _isHTTP && _isGST + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Connection Timeout") + fact: _videoSettings.httpTimeout + visible: fact.visible + } + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Retry Attempts") + fact: _videoSettings.httpRetryAttempts + visible: fact.visible + } + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Buffer Size") + fact: _videoSettings.httpBufferSize + visible: fact.visible + } + + FactCheckBoxSlider { + Layout.fillWidth: true + text: qsTr("HTTP Keep-Alive") + fact: _videoSettings.httpKeepAlive + visible: fact.visible + } + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("User Agent") + 
fact: _videoSettings.httpUserAgent + visible: fact.visible + } + } + + SettingsGroupLayout { + Layout.fillWidth: true + heading: qsTr("WebSocket Advanced Settings") + visible: _isWebSocket && _isGST + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Connection Timeout") + fact: _videoSettings.websocketTimeout + visible: fact.visible + } + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Reconnect Delay") + fact: _videoSettings.websocketReconnectDelay + visible: fact.visible + } + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Heartbeat Interval") + fact: _videoSettings.websocketHeartbeat + visible: fact.visible + } + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Buffer Frames") + fact: _videoSettings.websocketBufferFrames + visible: fact.visible + } + + FactCheckBoxSlider { + Layout.fillWidth: true + text: qsTr("Enable Adaptive Quality") + fact: _videoSettings.adaptiveQuality + visible: fact.visible + } + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Minimum Quality %") + fact: _videoSettings.minQuality + visible: _videoSettings.adaptiveQuality.rawValue && fact.visible + enabled: _videoSettings.adaptiveQuality.rawValue + } + + LabelledFactTextField { + Layout.fillWidth: true + label: qsTr("Maximum Quality %") + fact: _videoSettings.maxQuality + visible: _videoSettings.adaptiveQuality.rawValue && fact.visible + enabled: _videoSettings.adaptiveQuality.rawValue + } + } + SettingsGroupLayout { Layout.fillWidth: true heading: qsTr("Local Video Storage") diff --git a/src/VideoManager/VideoManager.cc b/src/VideoManager/VideoManager.cc index 4648b198e906..266e46c298e6 100644 --- a/src/VideoManager/VideoManager.cc +++ b/src/VideoManager/VideoManager.cc @@ -91,6 +91,8 @@ void VideoManager::init(QQuickWindow *mainWindow) (void) connect(_videoSettings->udpUrl(), &Fact::rawValueChanged, this, &VideoManager::_videoSourceChanged); (void) connect(_videoSettings->rtspUrl(), &Fact::rawValueChanged, this, &VideoManager::_videoSourceChanged); (void) connect(_videoSettings->tcpUrl(), &Fact::rawValueChanged, this, &VideoManager::_videoSourceChanged); + (void) connect(_videoSettings->httpUrl(), &Fact::rawValueChanged, this, &VideoManager::_videoSourceChanged); + (void) connect(_videoSettings->websocketUrl(), &Fact::rawValueChanged, this, &VideoManager::_videoSourceChanged); (void) connect(_videoSettings->aspectRatio(), &Fact::rawValueChanged, this, &VideoManager::aspectRatioChanged); (void) connect(_videoSettings->lowLatencyMode(), &Fact::rawValueChanged, this, [this](const QVariant &value) { Q_UNUSED(value); _restartAllVideos(); }); (void) connect(MultiVehicleManager::instance(), &MultiVehicleManager::activeVehicleChanged, this, &VideoManager::_setActiveVehicle); @@ -343,6 +345,8 @@ bool VideoManager::isStreamSource() const VideoSettings::videoSourceRTSP, VideoSettings::videoSourceTCP, VideoSettings::videoSourceMPEGTS, + VideoSettings::videoSourceHTTP, + VideoSettings::videoSourceWebSocket, VideoSettings::videoSource3DRSolo, VideoSettings::videoSourceParrotDiscovery, VideoSettings::videoSourceYuneecMantisG, @@ -533,6 +537,10 @@ bool VideoManager::_updateSettings(VideoReceiver *receiver) settingsChanged |= _updateVideoUri(receiver, _videoSettings->rtspUrl()->rawValue().toString()); } else if (source == VideoSettings::videoSourceTCP) { settingsChanged |= _updateVideoUri(receiver, QStringLiteral("tcp://%1").arg(_videoSettings->tcpUrl()->rawValue().toString())); + } else if (source == VideoSettings::videoSourceHTTP) { + 
settingsChanged |= _updateVideoUri(receiver, _videoSettings->httpUrl()->rawValue().toString()); + } else if (source == VideoSettings::videoSourceWebSocket) { + settingsChanged |= _updateVideoUri(receiver, _videoSettings->websocketUrl()->rawValue().toString()); } else if (source == VideoSettings::videoSource3DRSolo) { settingsChanged |= _updateVideoUri(receiver, QStringLiteral("udp://0.0.0.0:5600")); } else if (source == VideoSettings::videoSourceParrotDiscovery) { diff --git a/src/VideoManager/VideoReceiver/GStreamer/CMakeLists.txt b/src/VideoManager/VideoReceiver/GStreamer/CMakeLists.txt index 057db82665df..4160a0f79c66 100644 --- a/src/VideoManager/VideoReceiver/GStreamer/CMakeLists.txt +++ b/src/VideoManager/VideoReceiver/GStreamer/CMakeLists.txt @@ -14,11 +14,23 @@ if(QGC_ENABLE_GST_VIDEOSTREAMING) if(NOT MACOS) # NOTE: Using FindGStreamer.cmake is currently bypassed on macOS # Using hardwired framework path as a workaround until FindGStreamer works on macOS - find_package(GStreamer - REQUIRED - COMPONENTS Core Base Video Gl GlPrototypes Rtsp - OPTIONAL_COMPONENTS GlEgl GlWayland GlX11 - ) + + # App component is required for desktop platforms (WebSocket support) + # but optional for Android (which doesn't include gstreamer-app-1.0) + if(ANDROID) + find_package(GStreamer + REQUIRED + COMPONENTS Core Base Video Gl GlPrototypes Rtsp + OPTIONAL_COMPONENTS App GlEgl GlWayland GlX11 + ) + else() + # Desktop platforms: App is required for WebSocket video streaming + find_package(GStreamer + REQUIRED + COMPONENTS Core Base Video Gl GlPrototypes Rtsp App + OPTIONAL_COMPONENTS GlEgl GlWayland GlX11 + ) + endif() endif() # Build GStreamer Qt6 QML GL plugin @@ -45,6 +57,42 @@ if(TARGET gstqml6gl) GstVideoReceiver.h ) + # WebSocket video streaming support (requires GStreamer App component) + # App is required on desktop platforms, optional on Android + if(ANDROID) + # Android: Check if App component is available + if(GStreamer_App_FOUND) + message(STATUS "QGC: GStreamer App component found - enabling WebSocket video streaming on Android") + set(QGC_WEBSOCKET_ENABLED TRUE) + else() + message(STATUS "QGC: GStreamer App component not found - WebSocket video streaming disabled (Android limitation)") + set(QGC_WEBSOCKET_ENABLED FALSE) + endif() + else() + # Desktop platforms: App is required, so WebSocket is always enabled + message(STATUS "QGC: WebSocket video streaming enabled (desktop platform)") + set(QGC_WEBSOCKET_ENABLED TRUE) + endif() + + if(QGC_WEBSOCKET_ENABLED) + target_link_libraries(${CMAKE_PROJECT_NAME} PRIVATE Qt6::WebSockets) + + # Link GStreamer App library for WebSocket video source (appsrc support) + if(WIN32) + target_link_libraries(${CMAKE_PROJECT_NAME} PRIVATE "${GSTREAMER_LIB_PATH}/gstapp-1.0.lib") + else() + target_link_libraries(${CMAKE_PROJECT_NAME} PRIVATE gstapp-1.0) + endif() + + target_sources(${CMAKE_PROJECT_NAME} + PRIVATE + QGCWebSocketVideoSource.cc + QGCWebSocketVideoSource.h + ) + + target_compile_definitions(${CMAKE_PROJECT_NAME} PRIVATE QGC_GST_APP_AVAILABLE) + endif() + # Build custom GStreamer QGC plugin add_subdirectory(gstqgc) diff --git a/src/VideoManager/VideoReceiver/GStreamer/GstVideoReceiver.cc b/src/VideoManager/VideoReceiver/GStreamer/GstVideoReceiver.cc index 7d86d99bc7a6..8a0a0211cca7 100644 --- a/src/VideoManager/VideoReceiver/GStreamer/GstVideoReceiver.cc +++ b/src/VideoManager/VideoReceiver/GStreamer/GstVideoReceiver.cc @@ -20,9 +20,17 @@ #include "GstVideoReceiver.h" #include "GStreamerHelpers.h" #include "QGCLoggingCategory.h" +#include 
"SettingsManager.h" +#include "VideoSettings.h" + +#ifdef QGC_GST_APP_AVAILABLE +#include "QGCWebSocketVideoSource.h" +#endif #include #include +#include +#include #include #include @@ -638,6 +646,10 @@ GstElement *GstVideoReceiver::_makeSource(const QString &input) const bool isUdp265 = input.contains("udp265://", Qt::CaseInsensitive); const bool isUdpMPEGTS = input.contains("mpegts://", Qt::CaseInsensitive); const bool isTcpMPEGTS = input.contains("tcp://", Qt::CaseInsensitive); + const bool isHttp = sourceUrl.scheme().startsWith("http", Qt::CaseInsensitive); +#ifdef QGC_GST_APP_AVAILABLE + const bool isWebSocket = sourceUrl.scheme().startsWith("ws", Qt::CaseInsensitive); +#endif GstElement *source = nullptr; GstElement *buffer = nullptr; @@ -647,6 +659,18 @@ GstElement *GstVideoReceiver::_makeSource(const QString &input) GstElement *srcbin = nullptr; do { + // Handle HTTP MJPEG streams + if (isHttp) { + return _makeHttpSource(input); + } + +#ifdef QGC_GST_APP_AVAILABLE + // Handle WebSocket streams + if (isWebSocket) { + return _makeWebSocketSource(input); + } +#endif + if (isRtsp) { if (!GStreamer::is_valid_rtsp_uri(input.toUtf8().constData())) { qCCritical(GstVideoReceiverLog) << "Invalid RTSP URI:" << input; @@ -798,6 +822,298 @@ GstElement *GstVideoReceiver::_makeSource(const QString &input) return srcbin; } +GstElement *GstVideoReceiver::_makeHttpSource(const QString &url) +{ + qCDebug(GstVideoReceiverLog) << "Creating HTTP MJPEG source for:" << url; + + GstElement *bin = nullptr; + GstElement *source = nullptr; + GstElement *queue = nullptr; + GstElement *multipartdemux = nullptr; + GstElement *jpegdec = nullptr; + GstPad *srcpad = nullptr; + GstPad *ghostpad = nullptr; + bool releaseElements = true; + + do { + // Create bin to hold HTTP source pipeline + bin = gst_bin_new("http_sourcebin"); + if (!bin) { + qCCritical(GstVideoReceiverLog) << "gst_bin_new('http_sourcebin') failed"; + break; + } + + // Create souphttpsrc element for HTTP/HTTPS support + source = gst_element_factory_make("souphttpsrc", "http_source"); + if (!source) { + qCCritical(GstVideoReceiverLog) << "gst_element_factory_make('souphttpsrc') failed - check GStreamer soup plugin installation"; + break; + } + + // Use safe defaults (can't access Settings from worker thread) + // These match the defaults from Video.SettingsGroup.json + uint32_t timeout = _timeout > 0 ? _timeout : 10; // 10 seconds default + uint32_t retries = 3; // 3 retry attempts default + uint32_t bufferSize = 32768; // 32KB default + bool keepAlive = true; // Keep-alive enabled by default + QString userAgent = QStringLiteral("QGroundControl/4.x"); + + // Configure souphttpsrc + g_object_set(source, + "location", url.toUtf8().constData(), + "is-live", TRUE, + "timeout", timeout, + "retries", retries, + "blocksize", bufferSize, + "keep-alive", keepAlive ? 
TRUE : FALSE, + "user-agent", userAgent.toUtf8().constData(), + nullptr); + + qCDebug(GstVideoReceiverLog) << "HTTP source configured - timeout:" << timeout + << "retries:" << retries << "buffer:" << bufferSize; + + // Create queue for buffering + queue = gst_element_factory_make("queue", "http_queue"); + if (!queue) { + qCCritical(GstVideoReceiverLog) << "gst_element_factory_make('queue') failed"; + break; + } + + // Configure queue based on low-latency mode + if (lowLatency()) { + // Low-latency mode: minimal buffering + g_object_set(queue, + "max-size-buffers", 2, + "max-size-time", (guint64)100000000, // 100ms + "leaky", 2, // downstream leaky + nullptr); + qCDebug(GstVideoReceiverLog) << "HTTP queue configured for low-latency mode"; + } else { + // Stable mode: more buffering + g_object_set(queue, + "max-size-buffers", 5, + "max-size-time", (guint64)500000000, // 500ms + "leaky", 2, // downstream leaky + nullptr); + qCDebug(GstVideoReceiverLog) << "HTTP queue configured for stable mode"; + } + + // Create multipartdemux for MJPEG boundary parsing + multipartdemux = gst_element_factory_make("multipartdemux", "multipart_demux"); + if (!multipartdemux) { + qCCritical(GstVideoReceiverLog) << "gst_element_factory_make('multipartdemux') failed - check GStreamer multipart plugin"; + break; + } + + // Set boundary to match PixEagle/standard MJPEG format + g_object_set(multipartdemux, + "boundary", "frame", + nullptr); + + // Create JPEG decoder + jpegdec = gst_element_factory_make("jpegdec", "jpeg_decoder"); + if (!jpegdec) { + qCCritical(GstVideoReceiverLog) << "gst_element_factory_make('jpegdec') failed"; + break; + } + + // Add all elements to bin + gst_bin_add_many(GST_BIN(bin), source, queue, multipartdemux, jpegdec, nullptr); + + // Link source → queue → multipartdemux + if (!gst_element_link_many(source, queue, multipartdemux, nullptr)) { + qCCritical(GstVideoReceiverLog) << "Failed to link HTTP source pipeline elements"; + break; + } + + // multipartdemux has dynamic pads, so we need to connect to pad-added signal + // We'll link multipartdemux → jpegdec when pad becomes available + struct PadLinkData { + GstElement *jpegdec; + GstElement *bin; + }; + + PadLinkData *linkData = new PadLinkData{jpegdec, bin}; + + g_signal_connect_data(multipartdemux, "pad-added", + G_CALLBACK(+[](GstElement *element, GstPad *pad, gpointer user_data) { + PadLinkData *data = static_cast(user_data); + GstPad *sinkpad = gst_element_get_static_pad(data->jpegdec, "sink"); + + if (sinkpad && !gst_pad_is_linked(sinkpad)) { + GstPadLinkReturn ret = gst_pad_link(pad, sinkpad); + if (ret != GST_PAD_LINK_OK) { + qCCritical(GstVideoReceiverLog) << "Failed to link multipartdemux to jpegdec:" << ret; + } else { + qCDebug(GstVideoReceiverLog) << "Successfully linked multipartdemux → jpegdec"; + } + } + + if (sinkpad) { + gst_object_unref(sinkpad); + } + }), + linkData, + +[](gpointer data, GClosure *) { + delete static_cast(data); + }, + static_cast(0)); + + // Create ghost pad from jpegdec's src pad + srcpad = gst_element_get_static_pad(jpegdec, "src"); + if (!srcpad) { + qCCritical(GstVideoReceiverLog) << "Failed to get jpegdec src pad"; + break; + } + + ghostpad = gst_ghost_pad_new("src", srcpad); + if (!ghostpad) { + qCCritical(GstVideoReceiverLog) << "gst_ghost_pad_new() failed"; + break; + } + + if (!gst_element_add_pad(bin, ghostpad)) { + qCCritical(GstVideoReceiverLog) << "gst_element_add_pad() failed"; + gst_clear_object(&ghostpad); + break; + } + + qCDebug(GstVideoReceiverLog) << "HTTP MJPEG source pipeline 
created successfully"; + + releaseElements = false; + ghostpad = nullptr; // Bin owns it now + + } while(0); + + // Cleanup on failure + if (releaseElements) { + gst_clear_object(&bin); + gst_clear_object(&jpegdec); + gst_clear_object(&multipartdemux); + gst_clear_object(&queue); + gst_clear_object(&source); + } + + if (srcpad) { + gst_object_unref(srcpad); + } + if (ghostpad) { + gst_object_unref(ghostpad); + } + + return bin; +} + +#ifdef QGC_GST_APP_AVAILABLE +GstElement *GstVideoReceiver::_makeWebSocketSource(const QString &url) +{ + qCDebug(GstVideoReceiverLog) << "Creating WebSocket video source for:" << url; + + // Use safe defaults (can't access Settings from worker thread) + // These match the defaults from Video.SettingsGroup.json + uint32_t timeout = _timeout > 0 ? _timeout : 10; // 10 seconds default + uint32_t reconnectDelay = 2000; // 2000ms default + uint32_t heartbeatInterval = 5000; // 5000ms default + uint32_t minQuality = 60; // 60% default + uint32_t maxQuality = 95; // 95% default + bool adaptiveQuality = true; // Enabled by default + + // Create WebSocket video source + // NOTE: Parent is nullptr because this runs in GstVideoWorker thread, + // but QGCWebSocketVideoSource needs to live on main Qt thread for WebSocket event loop + QGCWebSocketVideoSource *wsSource = new QGCWebSocketVideoSource( + url, + timeout, + reconnectDelay, + heartbeatInterval, + minQuality, + maxQuality, + adaptiveQuality, + nullptr // No parent - cleaned up via g_object_set_data_full + ); + + // CRITICAL: Move to main thread so Qt event loop can process WebSocket network events + wsSource->moveToThread(QCoreApplication::instance()->thread()); + + // Get the appsrc element from WebSocket source + GstElement *appsrc = wsSource->appsrcElement(); + if (!appsrc) { + qCCritical(GstVideoReceiverLog) << "Failed to create appsrc from WebSocket source"; + delete wsSource; + return nullptr; + } + + // Create JPEG decoder + GstElement *jpegdec = gst_element_factory_make("jpegdec", "ws_jpegdec"); + if (!jpegdec) { + qCCritical(GstVideoReceiverLog) << "gst_element_factory_make('jpegdec') failed"; + delete wsSource; + return nullptr; + } + + // Create bin to hold the pipeline + GstElement *bin = gst_bin_new("websocket_sourcebin"); + if (!bin) { + qCCritical(GstVideoReceiverLog) << "gst_bin_new('websocket_sourcebin') failed"; + gst_object_unref(jpegdec); + delete wsSource; + return nullptr; + } + + // Add elements to bin + gst_bin_add_many(GST_BIN(bin), appsrc, jpegdec, nullptr); + + // Link appsrc → jpegdec + if (!gst_element_link(appsrc, jpegdec)) { + qCCritical(GstVideoReceiverLog) << "Failed to link appsrc → jpegdec"; + gst_object_unref(bin); + delete wsSource; + return nullptr; + } + + // Create ghost pad from jpegdec's src pad + GstPad *srcpad = gst_element_get_static_pad(jpegdec, "src"); + if (!srcpad) { + qCCritical(GstVideoReceiverLog) << "Failed to get jpegdec src pad"; + gst_object_unref(bin); + delete wsSource; + return nullptr; + } + + GstPad *ghostpad = gst_ghost_pad_new("src", srcpad); + gst_object_unref(srcpad); + + if (!ghostpad) { + qCCritical(GstVideoReceiverLog) << "gst_ghost_pad_new() failed"; + gst_object_unref(bin); + delete wsSource; + return nullptr; + } + + if (!gst_element_add_pad(bin, ghostpad)) { + qCCritical(GstVideoReceiverLog) << "gst_element_add_pad() failed"; + gst_object_unref(ghostpad); + gst_object_unref(bin); + delete wsSource; + return nullptr; + } + + // Store wsSource in bin for cleanup + // IMPORTANT: Use deleteLater() instead of delete because the object lives on main 
thread + g_object_set_data_full(G_OBJECT(bin), "websocket-source", + wsSource, +[](gpointer data) { + QGCWebSocketVideoSource* ws = static_cast<QGCWebSocketVideoSource*>(data); + ws->deleteLater(); // Schedule deletion on correct thread + }); + + // Start WebSocket connection on main thread (where event loop exists) + QMetaObject::invokeMethod(wsSource, "start", Qt::QueuedConnection); + + qCDebug(GstVideoReceiverLog) << "WebSocket source pipeline created successfully"; + return bin; +} +#endif // QGC_GST_APP_AVAILABLE + GstElement *GstVideoReceiver::_makeDecoder(GstCaps *caps, GstElement *videoSink) { Q_UNUSED(caps); Q_UNUSED(videoSink) diff --git a/src/VideoManager/VideoReceiver/GStreamer/GstVideoReceiver.h b/src/VideoManager/VideoReceiver/GStreamer/GstVideoReceiver.h index 1099233eb5d7..5c0a3c062944 100644 --- a/src/VideoManager/VideoReceiver/GStreamer/GstVideoReceiver.h +++ b/src/VideoManager/VideoReceiver/GStreamer/GstVideoReceiver.h @@ -75,6 +75,10 @@ private slots: private: GstElement *_makeSource(const QString &input); + GstElement *_makeHttpSource(const QString &url); +#ifdef QGC_GST_APP_AVAILABLE + GstElement *_makeWebSocketSource(const QString &url); +#endif GstElement *_makeDecoder(GstCaps *caps = nullptr, GstElement *videoSink = nullptr); GstElement *_makeFileSink(const QString &videoFile, FILE_FORMAT format); diff --git a/src/VideoManager/VideoReceiver/GStreamer/QGCWebSocketVideoSource.cc b/src/VideoManager/VideoReceiver/GStreamer/QGCWebSocketVideoSource.cc new file mode 100644 index 000000000000..b39b0f815de3 --- /dev/null +++ b/src/VideoManager/VideoReceiver/GStreamer/QGCWebSocketVideoSource.cc @@ -0,0 +1,435 @@ +/**************************************************************************** + * + * (c) 2009-2024 QGROUNDCONTROL PROJECT + * + * QGroundControl is licensed according to the terms in the file + * COPYING.md in the root of the source code directory.
+ * + ****************************************************************************/ + +#include "QGCWebSocketVideoSource.h" +#include "QGCLoggingCategory.h" + +#include +#include +#include +#include + +QGC_LOGGING_CATEGORY(WebSocketVideoLog, "qgc.videomanager.websocket") + +QGCWebSocketVideoSource::QGCWebSocketVideoSource( + const QString &url, + int timeout, + int reconnectDelay, + int heartbeatInterval, + int minQuality, + int maxQuality, + bool adaptiveQuality, + QObject *parent) + : QObject(parent) + , _url(url) + , _timeout(timeout) + , _reconnectDelay(reconnectDelay) + , _heartbeatInterval(heartbeatInterval) + , _minQuality(minQuality) + , _maxQuality(maxQuality) + , _adaptiveQuality(adaptiveQuality) +{ + qCDebug(WebSocketVideoLog) << "Creating WebSocket video source:" << url; + + // Create WebSocket + _webSocket = new QWebSocket(QString(), QWebSocketProtocol::VersionLatest, this); + + // Connect WebSocket signals + connect(_webSocket, &QWebSocket::connected, this, &QGCWebSocketVideoSource::onConnected); + connect(_webSocket, &QWebSocket::disconnected, this, &QGCWebSocketVideoSource::onDisconnected); + connect(_webSocket, &QWebSocket::textMessageReceived, this, &QGCWebSocketVideoSource::onTextMessageReceived); + connect(_webSocket, &QWebSocket::binaryMessageReceived, this, &QGCWebSocketVideoSource::onBinaryMessageReceived); + connect(_webSocket, &QWebSocket::errorOccurred, this, &QGCWebSocketVideoSource::onError); + connect(_webSocket, &QWebSocket::sslErrors, this, &QGCWebSocketVideoSource::onSslErrors); + + // Create heartbeat timer + _heartbeatTimer = new QTimer(this); + _heartbeatTimer->setInterval(_heartbeatInterval); + connect(_heartbeatTimer, &QTimer::timeout, this, &QGCWebSocketVideoSource::onHeartbeatTimer); + + // Create reconnect timer + _reconnectTimer = new QTimer(this); + _reconnectTimer->setSingleShot(true); + connect(_reconnectTimer, &QTimer::timeout, this, &QGCWebSocketVideoSource::onReconnectTimer); + + // Create GStreamer appsrc element + createAppsrcElement(); +} + +QGCWebSocketVideoSource::~QGCWebSocketVideoSource() +{ + qCDebug(WebSocketVideoLog) << "Destroying WebSocket video source"; + stop(); + cleanupAppsrc(); +} + +void QGCWebSocketVideoSource::createAppsrcElement() +{ + _appsrc = gst_element_factory_make("appsrc", "websocket_appsrc"); + if (!_appsrc) { + qCCritical(WebSocketVideoLog) << "Failed to create appsrc element"; + return; + } + + // Configure appsrc for live streaming + g_object_set(_appsrc, + "is-live", TRUE, + "format", GST_FORMAT_TIME, + "do-timestamp", TRUE, + "min-latency", (gint64)0, + "max-bytes", (guint64)(1024 * 1024), // 1MB max queue + "block", FALSE, + "stream-type", GST_APP_STREAM_TYPE_STREAM, + nullptr); + + // Set caps for JPEG frames + GstCaps *caps = gst_caps_new_simple("image/jpeg", + "framerate", GST_TYPE_FRACTION, 30, 1, + nullptr); + g_object_set(_appsrc, "caps", caps, nullptr); + gst_caps_unref(caps); + + // Increase reference count so it's not destroyed when removed from bin + gst_object_ref(_appsrc); + + qCDebug(WebSocketVideoLog) << "appsrc element created successfully"; +} + +void QGCWebSocketVideoSource::start() +{ + if (_connected) { + qCDebug(WebSocketVideoLog) << "Already connected"; + return; + } + + qCDebug(WebSocketVideoLog) << "Starting WebSocket connection to:" << _url; + _shouldReconnect = true; + _connectionStartTime = QDateTime::currentMSecsSinceEpoch(); + + emit stateChanged("Connecting"); + _webSocket->open(QUrl(_url)); +} + +void QGCWebSocketVideoSource::stop() +{ + qCDebug(WebSocketVideoLog) << "Stopping 
WebSocket connection"; + _shouldReconnect = false; + + _heartbeatTimer->stop(); + _reconnectTimer->stop(); + + if (_webSocket->state() == QAbstractSocket::ConnectedState) { + _webSocket->close(); + } + + // Send EOS to appsrc + if (_appsrc) { + gst_app_src_end_of_stream(GST_APP_SRC(_appsrc)); + } + + emit stateChanged("Stopped"); +} + +void QGCWebSocketVideoSource::setQuality(int quality) +{ + if (quality < _minQuality || quality > _maxQuality) { + qCWarning(WebSocketVideoLog) << "Quality out of range:" << quality; + return; + } + + if (_currentQuality != quality) { + sendQualityRequest(quality); + } +} + +void QGCWebSocketVideoSource::onConnected() +{ + qCDebug(WebSocketVideoLog) << "WebSocket connected successfully"; + _connected = true; + _frameCount = 0; + _totalBytesReceived = 0; + _framesDropped = 0; + + emit connected(); + emit stateChanged("Connected"); + + // Start heartbeat + _heartbeatTimer->start(); + + // Stop reconnect attempts + _reconnectTimer->stop(); +} + +void QGCWebSocketVideoSource::onDisconnected() +{ + qCDebug(WebSocketVideoLog) << "WebSocket disconnected"; + _connected = false; + _expectingBinaryFrame = false; + + emit disconnected(); + emit stateChanged("Disconnected"); + + _heartbeatTimer->stop(); + + // Schedule reconnection if needed + if (_shouldReconnect) { + scheduleReconnect(); + } +} + +void QGCWebSocketVideoSource::onTextMessageReceived(const QString &message) +{ + QJsonDocument doc = QJsonDocument::fromJson(message.toUtf8()); + if (doc.isNull() || !doc.isObject()) { + qCWarning(WebSocketVideoLog) << "Invalid JSON message:" << message; + return; + } + + QJsonObject obj = doc.object(); + QString type = obj["type"].toString(); + + if (type == "frame") { + // Frame metadata - binary frame will follow + _expectingBinaryFrame = true; + _expectedFrameSize = obj["size"].toInt(); + + int quality = obj["quality"].toInt(); + if (quality > 0 && quality != _currentQuality) { + _currentQuality = quality; + emit qualityChanged(_currentQuality); + } + + qCDebug(WebSocketVideoLog) << "Frame metadata: size=" << _expectedFrameSize + << "quality=" << _currentQuality; + + } else if (type == "pong") { + // Heartbeat response + qCDebug(WebSocketVideoLog) << "Heartbeat acknowledged"; + + } else if (type == "error") { + QString errorMsg = obj["message"].toString(); + qCWarning(WebSocketVideoLog) << "Server error:" << errorMsg; + emit error(errorMsg); + + } else { + qCDebug(WebSocketVideoLog) << "Unknown message type:" << type; + } +} + +void QGCWebSocketVideoSource::onBinaryMessageReceived(const QByteArray &message) +{ + if (!_expectingBinaryFrame) { + qCWarning(WebSocketVideoLog) << "Unexpected binary message, size:" << message.size(); + _framesDropped++; + return; + } + + _expectingBinaryFrame = false; + _lastFrameTime = QDateTime::currentMSecsSinceEpoch(); + _frameCount++; + _totalBytesReceived += message.size(); + + qCDebug(WebSocketVideoLog) << "Frame received: size=" << message.size() + << "frame#" << _frameCount; + + // Push frame to GStreamer + pushFrameToAppsrc(message); + + // Update statistics + emit frameReceived(message.size()); + updateBandwidthEstimate(message.size()); +} + +void QGCWebSocketVideoSource::onError(QAbstractSocket::SocketError socketError) +{ + QString errorString = _webSocket->errorString(); + qCWarning(WebSocketVideoLog) << "WebSocket error:" << socketError << errorString; + + emit error(errorString); + emit stateChanged("Error"); + + // Trigger reconnection + if (_shouldReconnect && !_reconnectTimer->isActive()) { + scheduleReconnect(); + } +} + 
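The pair of handlers above (`onTextMessageReceived` / `onBinaryMessageReceived`) expect each frame to arrive as a JSON text message of type `"frame"` followed by a single binary JPEG message. As a rough illustration only, the following Python sketch consumes that same two-message sequence so a server can be sanity-checked outside of QGC. It assumes a recent version of the third-party `websockets` package; the URL is the default from `Video.SettingsGroup.json`.

```python
#!/usr/bin/env python3
# Illustrative client sketch (not part of the PR): mirrors the receive logic of
# onTextMessageReceived / onBinaryMessageReceived -- JSON metadata, then binary JPEG.
import asyncio, json
import websockets  # third-party package, assumed available

async def main(url="ws://127.0.0.1:5077/ws/video_feed"):
    async with websockets.connect(url) as ws:
        meta = None
        for _ in range(100):                       # sample the first ~100 messages
            msg = await ws.recv()
            if isinstance(msg, str):               # text message: frame metadata, pong, or error
                obj = json.loads(msg)
                meta = obj if obj.get("type") == "frame" else None
            elif meta is not None:                 # binary message: the frame announced above
                print(f"frame: {len(msg)} bytes, advertised {meta.get('size')}, "
                      f"quality {meta.get('quality')}")
                meta = None

asyncio.run(main())
```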
+void QGCWebSocketVideoSource::onSslErrors(const QList &errors) +{ + for (const QSslError &sslError : errors) { + qCWarning(WebSocketVideoLog) << "SSL Error:" << sslError.errorString(); + } + + // For development/testing, you might want to ignore SSL errors + // In production, proper certificate validation should be enforced + // _webSocket->ignoreSslErrors(); +} + +void QGCWebSocketVideoSource::onHeartbeatTimer() +{ + if (_connected) { + sendHeartbeat(); + + // Check for stale connection (no frames in 3x heartbeat interval) + qint64 currentTime = QDateTime::currentMSecsSinceEpoch(); + if (_lastFrameTime > 0 && (currentTime - _lastFrameTime) > _heartbeatInterval * 3) { + qCWarning(WebSocketVideoLog) << "Connection stale, no frames in" + << (currentTime - _lastFrameTime) << "ms"; + _webSocket->close(); + } + } +} + +void QGCWebSocketVideoSource::onReconnectTimer() +{ + if (_shouldReconnect && !_connected) { + qCDebug(WebSocketVideoLog) << "Attempting reconnection..."; + emit stateChanged("Reconnecting"); + _webSocket->open(QUrl(_url)); + } +} + +void QGCWebSocketVideoSource::pushFrameToAppsrc(const QByteArray &frameData) +{ + if (!_appsrc) { + qCWarning(WebSocketVideoLog) << "appsrc is null, cannot push frame"; + return; + } + + // Create GStreamer buffer + GstBuffer *buffer = gst_buffer_new_allocate(nullptr, frameData.size(), nullptr); + if (!buffer) { + qCWarning(WebSocketVideoLog) << "Failed to allocate GstBuffer"; + _framesDropped++; + return; + } + + // Copy frame data into buffer + GstMapInfo map; + if (!gst_buffer_map(buffer, &map, GST_MAP_WRITE)) { + qCWarning(WebSocketVideoLog) << "Failed to map GstBuffer"; + gst_buffer_unref(buffer); + _framesDropped++; + return; + } + + memcpy(map.data, frameData.constData(), frameData.size()); + gst_buffer_unmap(buffer, &map); + + // Let appsrc handle timestamps automatically (do-timestamp=TRUE) + // GStreamer will generate proper relative timestamps for live stream + GST_BUFFER_PTS(buffer) = GST_CLOCK_TIME_NONE; + GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE; + + // Push to appsrc + GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(_appsrc), buffer); + if (ret != GST_FLOW_OK) { + qCWarning(WebSocketVideoLog) << "Failed to push buffer to appsrc, ret:" << ret; + _framesDropped++; + } +} + +void QGCWebSocketVideoSource::sendQualityRequest(int quality) +{ + if (!_connected) return; + + QJsonObject obj; + obj["type"] = "quality"; + obj["quality"] = quality; + + QJsonDocument doc(obj); + _webSocket->sendTextMessage(doc.toJson(QJsonDocument::Compact)); + + qCDebug(WebSocketVideoLog) << "Quality request sent:" << quality; +} + +void QGCWebSocketVideoSource::sendHeartbeat() +{ + if (!_connected) return; + + QJsonObject obj; + obj["type"] = "ping"; + obj["timestamp"] = QDateTime::currentMSecsSinceEpoch(); + + QJsonDocument doc(obj); + _webSocket->sendTextMessage(doc.toJson(QJsonDocument::Compact)); + + qCDebug(WebSocketVideoLog) << "Heartbeat sent"; +} + +void QGCWebSocketVideoSource::updateBandwidthEstimate(int frameSize) +{ + qint64 currentTime = QDateTime::currentMSecsSinceEpoch(); + + // Add to history + _frameHistory.enqueue(qMakePair(currentTime, frameSize)); + + // Keep only recent frames + while (_frameHistory.size() > MAX_FRAME_HISTORY) { + _frameHistory.dequeue(); + } + + // Calculate bandwidth over the frame history window + if (_frameHistory.size() >= 10) { + qint64 oldestTime = _frameHistory.first().first; + qint64 timeSpan = currentTime - oldestTime; + + if (timeSpan > 0) { + int totalBytes = 0; + for (const auto &frame : 
_frameHistory) { + totalBytes += frame.second; + } + + _bandwidthBytesPerSecond = (totalBytes * 1000.0) / timeSpan; + emit bandwidthUpdated(_bandwidthBytesPerSecond); + + qCDebug(WebSocketVideoLog) << "Bandwidth:" << (_bandwidthBytesPerSecond / 1024.0) + << "KB/s, Quality:" << _currentQuality; + + // Adaptive quality adjustment + if (_adaptiveQuality) { + qreal targetBytesPerFrame = _bandwidthBytesPerSecond / 30.0; // Assume 30fps + + if (targetBytesPerFrame < 10000) { // < 10KB per frame + int newQuality = qMax(_minQuality, _currentQuality - 5); + if (newQuality != _currentQuality) { + qCDebug(WebSocketVideoLog) << "Reducing quality due to low bandwidth:" + << _currentQuality << "→" << newQuality; + sendQualityRequest(newQuality); + } + } else if (targetBytesPerFrame > 50000) { // > 50KB per frame + int newQuality = qMin(_maxQuality, _currentQuality + 5); + if (newQuality != _currentQuality) { + qCDebug(WebSocketVideoLog) << "Increasing quality due to high bandwidth:" + << _currentQuality << "→" << newQuality; + sendQualityRequest(newQuality); + } + } + } + } + } +} + +void QGCWebSocketVideoSource::scheduleReconnect() +{ + if (!_shouldReconnect) return; + + qCDebug(WebSocketVideoLog) << "Scheduling reconnection in" << _reconnectDelay << "ms"; + emit stateChanged("Waiting to reconnect"); + _reconnectTimer->start(_reconnectDelay); +} + +void QGCWebSocketVideoSource::cleanupAppsrc() +{ + if (_appsrc) { + gst_app_src_end_of_stream(GST_APP_SRC(_appsrc)); + gst_object_unref(_appsrc); + _appsrc = nullptr; + } +} diff --git a/src/VideoManager/VideoReceiver/GStreamer/QGCWebSocketVideoSource.h b/src/VideoManager/VideoReceiver/GStreamer/QGCWebSocketVideoSource.h new file mode 100644 index 000000000000..2cf10b397fd9 --- /dev/null +++ b/src/VideoManager/VideoReceiver/GStreamer/QGCWebSocketVideoSource.h @@ -0,0 +1,128 @@ +/**************************************************************************** + * + * (c) 2009-2024 QGROUNDCONTROL PROJECT + * + * QGroundControl is licensed according to the terms in the file + * COPYING.md in the root of the source code directory. + * + ****************************************************************************/ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +/** + * @brief WebSocket video source for QGroundControl + * + * Provides WebSocket-based video streaming with GStreamer appsrc integration. + * Implements bidirectional communication for adaptive quality control and + * automatic reconnection for robust video streaming from drones. 
+ * + * Protocol (compatible with PixEagle): + * - Server sends JSON metadata followed by binary JPEG frame + * - Client can request quality adjustments and send heartbeats + */ +class QGCWebSocketVideoSource : public QObject +{ + Q_OBJECT + +public: + explicit QGCWebSocketVideoSource( + const QString &url, + int timeout = 10, + int reconnectDelay = 2000, + int heartbeatInterval = 5000, + int minQuality = 60, + int maxQuality = 95, + bool adaptiveQuality = true, + QObject *parent = nullptr + ); + ~QGCWebSocketVideoSource(); + + GstElement* appsrcElement() { return _appsrc; } + bool isConnected() const { return _connected; } + int currentQuality() const { return _currentQuality; } + qreal bandwidthEstimate() const { return _bandwidthBytesPerSecond; } + +public slots: + void start(); + void stop(); + void setQuality(int quality); + +signals: + void connected(); + void disconnected(); + void error(const QString &errorString); + void frameReceived(int size); + void qualityChanged(int quality); + void bandwidthUpdated(qreal bytesPerSecond); + void stateChanged(const QString &state); + +private slots: + void onConnected(); + void onDisconnected(); + void onTextMessageReceived(const QString &message); + void onBinaryMessageReceived(const QByteArray &message); + void onError(QAbstractSocket::SocketError socketError); + void onSslErrors(const QList &errors); + void onHeartbeatTimer(); + void onReconnectTimer(); + +private: + void createAppsrcElement(); + void pushFrameToAppsrc(const QByteArray &frameData); + void sendQualityRequest(int quality); + void sendHeartbeat(); + void updateBandwidthEstimate(int frameSize); + void scheduleReconnect(); + void cleanupAppsrc(); + + // WebSocket connection + QWebSocket *_webSocket = nullptr; + QString _url; + bool _connected = false; + bool _shouldReconnect = true; + + // GStreamer appsrc element + GstElement *_appsrc = nullptr; + + // Timers + QTimer *_heartbeatTimer = nullptr; + QTimer *_reconnectTimer = nullptr; + + // Frame state tracking + bool _expectingBinaryFrame = false; + int _expectedFrameSize = 0; + qint64 _lastFrameTime = 0; + quint64 _frameCount = 0; + + // Quality control + int _currentQuality = 85; + int _minQuality = 60; + int _maxQuality = 95; + bool _adaptiveQuality = true; + + // Bandwidth tracking for adaptive quality + QQueue> _frameHistory; // + qreal _bandwidthBytesPerSecond = 0; + static constexpr int MAX_FRAME_HISTORY = 30; // Track last 30 frames + + // Configuration + int _timeout = 10; + int _reconnectDelay = 2000; + int _heartbeatInterval = 5000; + + // Statistics + quint64 _totalBytesReceived = 0; + quint64 _framesDropped = 0; + qint64 _connectionStartTime = 0; +}; diff --git a/test/VideoStreaming/README.md b/test/VideoStreaming/README.md new file mode 100644 index 000000000000..5c64f54a4adf --- /dev/null +++ b/test/VideoStreaming/README.md @@ -0,0 +1,299 @@ +# Video Streaming Test Servers for QGroundControl + +This directory contains minimal test servers for validating QGroundControl's HTTP/HTTPS MJPEG and WebSocket video streaming capabilities without requiring physical cameras or external hardware. + +## Overview + +These test servers generate synthetic video patterns (color bars, moving elements, timestamps) and stream them using industry-standard protocols. They follow the same pattern as other QGC test utilities (e.g., `test/ADSB/ADSB_Simulator.py`). + +## Quick Start + +### 1. 
Install Dependencies + +```bash +# Navigate to this directory +cd test/VideoStreaming + +# Install required Python packages +pip install -r requirements.txt +``` + +### 2. Start a Test Server + +**HTTP MJPEG Server:** +```bash +python http_mjpeg_server.py +# Default: http://127.0.0.1:5077/video_feed +``` + +**WebSocket Server:** +```bash +python websocket_video_server.py +# Default: ws://127.0.0.1:5077/ws/video_feed +``` + +### 3. Configure QGroundControl + +**For HTTP MJPEG:** +1. Open QGroundControl Settings → Video +2. Set **Video Source** to `HTTP / HTTPS Video Stream` +3. Set **URL** to `http://127.0.0.1:5077/video_feed` +4. Click **Apply** +5. View video in the main display + +**For WebSocket:** +1. Open QGroundControl Settings → Video +2. Set **Video Source** to `WebSocket Video Stream` +3. Set **URL** to `ws://127.0.0.1:5077/ws/video_feed` +4. Click **Apply** +5. View video in the main display + +## Server Options + +Both servers support the same command-line arguments: + +```bash +python http_mjpeg_server.py --help +python websocket_video_server.py --help +``` + +Common options: +- `--host HOST` - Bind address (default: 127.0.0.1) +- `--port PORT` - Server port (default: 5077 for both servers) +- `--width WIDTH` - Video width in pixels (default: 640) +- `--height HEIGHT` - Video height in pixels (default: 480) +- `--fps FPS` - Frames per second (default: 30) +- `--quality QUALITY` - JPEG quality 0-100 (default: 85) + +### Examples + +**Custom resolution and frame rate:** +```bash +python http_mjpeg_server.py --width 1280 --height 720 --fps 60 +``` + +**Network accessible (for testing from another device):** +```bash +python http_mjpeg_server.py --host 0.0.0.0 --port 8080 +# Then use: http://:8080/video_feed in QGC +``` + +**Lower quality for bandwidth testing:** +```bash +python websocket_video_server.py --quality 50 --fps 15 +``` + +## Testing WebSocket in Browser + +The WebSocket server includes a built-in test page: + +1. Start the WebSocket server +2. Open in browser: http://127.0.0.1:5077/test +3. You should see the live video stream + +This helps verify the server is working before testing in QGC. + +## Alternative: Pure GStreamer Command-Line + +If you prefer not to use Python, you can test with GStreamer command-line tools directly. + +### HTTP MJPEG Streaming (GStreamer CLI) + +**Server side - Generate and stream MJPEG:** +```bash +# Simple test pattern over TCP +gst-launch-1.0 videotestsrc ! \ + video/x-raw,width=640,height=480,framerate=30/1 ! \ + jpegenc ! multipartmux ! \ + tcpserversink host=127.0.0.1 port=5000 + +# With more realistic test pattern +gst-launch-1.0 videotestsrc pattern=smpte ! \ + video/x-raw,width=640,height=480,framerate=30/1 ! \ + timeoverlay ! jpegenc quality=85 ! multipartmux ! \ + tcpserversink host=0.0.0.0 port=5000 +``` + +**Client side - Test reception (optional):** +```bash +gst-launch-1.0 tcpclientsrc host=127.0.0.1 port=5000 ! \ + multipartdemux ! jpegdec ! \ + videoconvert ! autovideosink +``` + +**Notes:** +- GStreamer's built-in `tcpserversink` doesn't provide HTTP headers, so you'll need an HTTP wrapper or use QGC's raw TCP support (if available) +- For true HTTP MJPEG, consider using `souphttpsrc` on the client side or third-party tools +- The Python scripts above are recommended as they provide proper HTTP headers and are easier to use + +### WebSocket Streaming (GStreamer CLI) + +Pure GStreamer command-line WebSocket streaming requires custom GStreamer plugins or external tools. 
**We recommend using the Python `websocket_video_server.py` script instead**, as it's simpler and more reliable. + +If you need a GStreamer-native solution, consider: +- **gst-rtsp-server** - For RTSP streaming (different protocol but GStreamer-native) +- **Custom appsrc/appsink** - Requires C/Python code (similar to our Python script) + +### RTSP Streaming (Alternative) + +RTSP is another common protocol supported by GStreamer: + +```bash +# Requires gst-rtsp-server (separate package) +gst-rtsp-server \ + --gst-debug=3 \ + --factory /test "videotestsrc ! x264enc ! rtph264pay name=pay0" + +# Then connect to: rtsp://127.0.0.1:8554/test +``` + +## Using with Real Video Sources + +While these scripts generate synthetic test patterns, you can easily modify them to use: + +### Webcam +```python +# In http_mjpeg_server.py or websocket_video_server.py +# Replace generate_frame() with: +cap = cv2.VideoCapture(0) # 0 = default webcam +ret, frame = cap.read() +``` + +### Video File +```python +# Replace generate_frame() with: +cap = cv2.VideoCapture('test_video.mp4') +ret, frame = cap.read() +if not ret: + cap.set(cv2.CAP_PROP_POS_FRAMES, 0) # Loop video + ret, frame = cap.read() +``` + +### External Tools + +Instead of these test scripts, you can also use: + +- **[PixEagle](https://github.com/alireza787b/PixEagle)** - Full-featured drone simulator with HTTP MJPEG video streaming +- **[GStreamer RTSP Server](https://gstreamer.freedesktop.org/documentation/gst-rtsp-server/)** - For RTSP protocol testing +- **[FFmpeg](https://ffmpeg.org/)** - For advanced streaming scenarios +- **[OBS Studio](https://obsproject.com/)** - Can stream via RTMP/RTSP with plugins + +## Troubleshooting + +### "ModuleNotFoundError: No module named 'fastapi'" +Install dependencies: `pip install -r requirements.txt` + +### "Address already in use" +Another service is using the port. Either: +- Stop the other service +- Use a different port: `--port 8080` + +### "Cannot connect from QGC" +1. Check firewall settings (allow Python or the specific port) +2. Verify the server is running (you should see startup logs) +3. Check the URL in QGC matches exactly (including http:// or ws://) +4. Try accessing http://127.0.0.1:5077/ in a browser to verify server is responding + +### Video is choppy or delayed +- Reduce frame rate: `--fps 15` +- Lower quality: `--quality 60` +- Reduce resolution: `--width 320 --height 240` + +### WebSocket disconnects immediately +- Check QGC logs for errors +- Verify WebSocket URL starts with `ws://` not `http://` +- Test with the browser test page first: http://127.0.0.1:5077/test + +## Technical Details + +### HTTP MJPEG Format + +The HTTP MJPEG server streams video using the `multipart/x-mixed-replace` content type, which is the standard for MJPEG-over-HTTP: + +``` +Content-Type: multipart/x-mixed-replace; boundary=frame + +--frame +Content-Type: image/jpeg + + +--frame +Content-Type: image/jpeg + + +... +``` + +### WebSocket Protocol (QGC/PixEagle Format) + +The WebSocket server implements the QGC/PixEagle protocol with a two-message sequence per frame: + +**1. Frame Metadata (Text JSON message):** +```json +{ + "type": "frame", + "size": 12345, + "quality": 85 +} +``` + +**2. 
Frame Data (Binary message):** +- Raw JPEG image bytes + +**Additional Protocol Messages:** + +**Heartbeat (QGC → Server):** +```json +{"type": "ping", "timestamp": 1234567890} +``` + +**Heartbeat Response (Server → QGC):** +```json +{"type": "pong"} +``` + +**Quality Change Request (QGC → Server):** +```json +{"type": "quality", "quality": 60} +``` + +**Error Message (Server → QGC):** +```json +{"type": "error", "message": "Error description"} +``` + +This protocol ensures proper frame synchronization and allows QGC to adapt video quality dynamically. + +### GStreamer Pipeline (QGC Side) + +When QGC receives these streams, it uses GStreamer pipelines similar to: + +**HTTP MJPEG:** +``` +souphttpsrc → queue → multipartdemux → jpegdec → [display/record] +``` + +**WebSocket:** +``` +appsrc → queue → jpegdec → [display/record] +``` + +## Contributing + +If you improve these test servers or add new features: +1. Ensure they remain minimal and easy to run +2. Keep dependencies limited (fastapi, uvicorn, opencv-python, numpy) +3. Update this README with new features +4. Test on multiple platforms (Windows, Linux, macOS) + +## License + +These test scripts follow the same license as QGroundControl itself. +See the root `COPYING.md` for details. + +## Related Resources + +- [QGroundControl Developer Guide](https://dev.qgroundcontrol.com/) +- [GStreamer Documentation](https://gstreamer.freedesktop.org/documentation/) +- [FastAPI Documentation](https://fastapi.tiangolo.com/) +- [OpenCV Python Documentation](https://docs.opencv.org/4.x/d6/d00/tutorial_py_root.html) diff --git a/test/VideoStreaming/http_mjpeg_server.py b/test/VideoStreaming/http_mjpeg_server.py new file mode 100644 index 000000000000..94527bb5a121 --- /dev/null +++ b/test/VideoStreaming/http_mjpeg_server.py @@ -0,0 +1,250 @@ +#!/usr/bin/env python3 +""" +HTTP MJPEG Video Streaming Test Server for QGroundControl + +This script creates a simple HTTP server that streams MJPEG video, +allowing developers to test QGC's HTTP video streaming capabilities +without requiring physical cameras or external hardware. 
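+
+The stream is served at /video_feed using the multipart/x-mixed-replace
+(MJPEG over HTTP) content type, so any MJPEG-capable client can read it.
+A quick way to sanity-check the stream without QGC (assuming GStreamer is
+installed) is:
+
+    gst-launch-1.0 souphttpsrc location=http://127.0.0.1:5077/video_feed ! \
+        multipartdemux ! jpegdec ! videoconvert ! autovideosink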
+ +Usage: + python http_mjpeg_server.py [--host HOST] [--port PORT] [--fps FPS] + +Example: + python http_mjpeg_server.py --host 127.0.0.1 --port 5077 --fps 30 + +Default URL: http://127.0.0.1:5077/video_feed +""" + +import argparse +import asyncio +import time +from datetime import datetime +from typing import Generator + +import cv2 +import numpy as np +from fastapi import FastAPI +from fastapi.responses import StreamingResponse +import uvicorn + + +class VideoTestPattern: + """Generates test video frames with color bars and timestamp.""" + + def __init__(self, width: int = 640, height: int = 480, fps: int = 30): + self.width = width + self.height = height + self.fps = fps + self.frame_count = 0 + + def generate_frame(self) -> np.ndarray: + """Generate a test pattern frame with color bars, moving circle, and timestamp.""" + # Create base frame with color bars + frame = np.zeros((self.height, self.width, 3), dtype=np.uint8) + + # Define color bars (BGR format) + colors = [ + (255, 255, 255), # White + (0, 255, 255), # Yellow + (255, 255, 0), # Cyan + (0, 255, 0), # Green + (255, 0, 255), # Magenta + (0, 0, 255), # Red + (255, 0, 0), # Blue + (0, 0, 0), # Black + ] + + # Draw color bars + bar_width = self.width // len(colors) + for i, color in enumerate(colors): + x1 = i * bar_width + x2 = (i + 1) * bar_width if i < len(colors) - 1 else self.width + frame[:self.height // 2, x1:x2] = color + + # Draw moving circle in bottom half + circle_y = self.height * 3 // 4 + circle_x = int((self.frame_count % (self.width - 40)) + 20) + cv2.circle(frame, (circle_x, circle_y), 20, (0, 255, 0), -1) + + # Add timestamp + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + cv2.putText( + frame, + f"Frame: {self.frame_count} | {timestamp}", + (10, self.height - 10), + cv2.FONT_HERSHEY_SIMPLEX, + 0.5, + (255, 255, 255), + 1, + cv2.LINE_AA + ) + + # Add QGC test info + cv2.putText( + frame, + "QGroundControl HTTP MJPEG Test Stream", + (10, 30), + cv2.FONT_HERSHEY_SIMPLEX, + 0.6, + (0, 255, 255), + 2, + cv2.LINE_AA + ) + + self.frame_count += 1 + return frame + + +class MJPEGStreamer: + """Handles MJPEG stream generation.""" + + def __init__(self, pattern: VideoTestPattern, quality: int = 85): + self.pattern = pattern + self.quality = quality + self.clients = 0 + + async def generate_frames(self) -> Generator[bytes, None, None]: + """Generate MJPEG stream frames.""" + self.clients += 1 + client_id = self.clients + print(f"[Client {client_id}] Connected to MJPEG stream") + + try: + frame_duration = 1.0 / self.pattern.fps + + while True: + start_time = time.time() + + # Generate frame + frame = self.pattern.generate_frame() + + # Encode as JPEG + _, buffer = cv2.imencode( + '.jpg', + frame, + [cv2.IMWRITE_JPEG_QUALITY, self.quality] + ) + + # Yield frame in multipart format + yield ( + b'--frame\r\n' + b'Content-Type: image/jpeg\r\n\r\n' + + buffer.tobytes() + + b'\r\n' + ) + + # Maintain frame rate + elapsed = time.time() - start_time + sleep_time = max(0, frame_duration - elapsed) + await asyncio.sleep(sleep_time) + + except Exception as e: + print(f"[Client {client_id}] Disconnected: {e}") + finally: + print(f"[Client {client_id}] Stream ended") + + +def create_app(pattern: VideoTestPattern, quality: int = 85) -> FastAPI: + """Create FastAPI application.""" + app = FastAPI( + title="QGroundControl HTTP MJPEG Test Server", + description="Streams test video pattern via HTTP MJPEG for QGC testing" + ) + + streamer = MJPEGStreamer(pattern, quality) + + @app.get("/") + async def root(): + return { + 
"service": "QGroundControl HTTP MJPEG Test Server", + "video_feed": "/video_feed", + "resolution": f"{pattern.width}x{pattern.height}", + "fps": pattern.fps, + "quality": quality, + "usage": f"http://{app.state.host}:{app.state.port}/video_feed" + } + + @app.get("/video_feed") + async def video_feed(): + """MJPEG video stream endpoint.""" + return StreamingResponse( + streamer.generate_frames(), + media_type="multipart/x-mixed-replace; boundary=frame" + ) + + return app + + +def main(): + parser = argparse.ArgumentParser( + description="HTTP MJPEG Video Streaming Test Server for QGroundControl" + ) + parser.add_argument( + "--host", + default="127.0.0.1", + help="Host address to bind to (default: 127.0.0.1)" + ) + parser.add_argument( + "--port", + type=int, + default=5077, + help="Port to bind to (default: 5077)" + ) + parser.add_argument( + "--width", + type=int, + default=640, + help="Video width in pixels (default: 640)" + ) + parser.add_argument( + "--height", + type=int, + default=480, + help="Video height in pixels (default: 480)" + ) + parser.add_argument( + "--fps", + type=int, + default=30, + help="Frames per second (default: 30)" + ) + parser.add_argument( + "--quality", + type=int, + default=85, + help="JPEG quality 0-100 (default: 85)" + ) + + args = parser.parse_args() + + # Create test pattern generator + pattern = VideoTestPattern(args.width, args.height, args.fps) + + # Create FastAPI app + app = create_app(pattern, args.quality) + app.state.host = args.host + app.state.port = args.port + + print("=" * 70) + print("QGroundControl HTTP MJPEG Test Server") + print("=" * 70) + print(f"Video URL: http://{args.host}:{args.port}/video_feed") + print(f"Info URL: http://{args.host}:{args.port}/") + print(f"Resolution: {args.width}x{args.height}") + print(f"Frame Rate: {args.fps} FPS") + print(f"Quality: {args.quality}%") + print("=" * 70) + print("\nConfiguring QGroundControl:") + print(" 1. Open Settings → Video") + print(" 2. Set 'Video Source' to 'HTTP / HTTPS Video Stream'") + print(f" 3. Set 'URL' to: http://{args.host}:{args.port}/video_feed") + print(" 4. Click 'Apply' and view video in the main display") + print("\nPress Ctrl+C to stop the server") + print("=" * 70) + + # Run server + uvicorn.run(app, host=args.host, port=args.port, log_level="info") + + +if __name__ == "__main__": + main() diff --git a/test/VideoStreaming/requirements.txt b/test/VideoStreaming/requirements.txt new file mode 100644 index 000000000000..3c825b15e317 --- /dev/null +++ b/test/VideoStreaming/requirements.txt @@ -0,0 +1,4 @@ +fastapi>=0.115.0 +uvicorn[standard]>=0.32.0 +opencv-python>=4.10.0 +numpy>=1.24.0 diff --git a/test/VideoStreaming/websocket_video_server.py b/test/VideoStreaming/websocket_video_server.py new file mode 100644 index 000000000000..c4ba2fd6799d --- /dev/null +++ b/test/VideoStreaming/websocket_video_server.py @@ -0,0 +1,391 @@ +#!/usr/bin/env python3 +""" +WebSocket Video Streaming Test Server for QGroundControl + +This script creates a simple WebSocket server that streams JPEG video frames, +allowing developers to test QGC's WebSocket video streaming capabilities +without requiring physical cameras or external hardware. 
+ +Usage: + python websocket_video_server.py [--host HOST] [--port PORT] [--fps FPS] + +Example: + python websocket_video_server.py --host 127.0.0.1 --port 5077 --fps 30 + +Default URL: ws://127.0.0.1:5077/ws/video_feed +""" + +import argparse +import asyncio +import json +import time +from datetime import datetime + +import cv2 +import numpy as np +from fastapi import FastAPI, WebSocket, WebSocketDisconnect +from fastapi.responses import HTMLResponse +import uvicorn + + +class VideoTestPattern: + """Generates test video frames with color bars and timestamp.""" + + def __init__(self, width: int = 640, height: int = 480, fps: int = 30): + self.width = width + self.height = height + self.fps = fps + self.frame_count = 0 + + def generate_frame(self) -> np.ndarray: + """Generate a test pattern frame with color bars, moving circle, and timestamp.""" + # Create base frame with color bars + frame = np.zeros((self.height, self.width, 3), dtype=np.uint8) + + # Define color bars (BGR format) + colors = [ + (255, 255, 255), # White + (0, 255, 255), # Yellow + (255, 255, 0), # Cyan + (0, 255, 0), # Green + (255, 0, 255), # Magenta + (0, 0, 255), # Red + (255, 0, 0), # Blue + (0, 0, 0), # Black + ] + + # Draw color bars + bar_width = self.width // len(colors) + for i, color in enumerate(colors): + x1 = i * bar_width + x2 = (i + 1) * bar_width if i < len(colors) - 1 else self.width + frame[:self.height // 2, x1:x2] = color + + # Draw moving circle in bottom half + circle_y = self.height * 3 // 4 + circle_x = int((self.frame_count % (self.width - 40)) + 20) + cv2.circle(frame, (circle_x, circle_y), 20, (0, 255, 0), -1) + + # Add timestamp + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + cv2.putText( + frame, + f"Frame: {self.frame_count} | {timestamp}", + (10, self.height - 10), + cv2.FONT_HERSHEY_SIMPLEX, + 0.5, + (255, 255, 255), + 1, + cv2.LINE_AA + ) + + # Add QGC test info + cv2.putText( + frame, + "QGroundControl WebSocket Test Stream", + (10, 30), + cv2.FONT_HERSHEY_SIMPLEX, + 0.6, + (255, 0, 255), + 2, + cv2.LINE_AA + ) + + self.frame_count += 1 + return frame + + +class WebSocketStreamer: + """Handles WebSocket video streaming.""" + + def __init__(self, pattern: VideoTestPattern, quality: int = 85): + self.pattern = pattern + self.quality = quality + self.clients = 0 + + async def handle_client_messages(self, websocket: WebSocket, client_id: int): + """Handle incoming messages from QGC client.""" + try: + while True: + message = await websocket.receive_text() + try: + data = json.loads(message) + msg_type = data.get("type") + + if msg_type == "ping": + # Respond to heartbeat (QGC expects "pong") + await websocket.send_text(json.dumps({"type": "pong"})) + timestamp = data.get("timestamp", "") + print(f"[Client {client_id}] Heartbeat (ping timestamp: {timestamp})") + + elif msg_type == "quality": + # Handle quality change request (QGC sends "quality", not "setQuality") + new_quality = data.get("quality", self.quality) + if 1 <= new_quality <= 100: + self.quality = new_quality + print(f"[Client {client_id}] Quality changed to {new_quality}") + else: + print(f"[Client {client_id}] Invalid quality: {new_quality}") + + else: + print(f"[Client {client_id}] Unknown message type: {msg_type}") + + except json.JSONDecodeError: + print(f"[Client {client_id}] Invalid JSON: {message}") + + except WebSocketDisconnect: + pass + except Exception as e: + print(f"[Client {client_id}] Message handler error: {e}") + + async def stream_frames(self, websocket: WebSocket, client_id: int): + 
"""Stream video frames to client.""" + try: + frame_duration = 1.0 / self.pattern.fps + + while True: + start_time = time.time() + + # Generate frame + frame = self.pattern.generate_frame() + + # Encode as JPEG + _, buffer = cv2.imencode( + '.jpg', + frame, + [cv2.IMWRITE_JPEG_QUALITY, self.quality] + ) + + frame_bytes = buffer.tobytes() + + # Send frame metadata first (QGC/PixEagle protocol) + metadata = { + "type": "frame", + "size": len(frame_bytes), + "quality": self.quality + } + await websocket.send_text(json.dumps(metadata)) + + # Then send the actual JPEG frame as binary data + await websocket.send_bytes(frame_bytes) + + # Maintain frame rate + elapsed = time.time() - start_time + sleep_time = max(0, frame_duration - elapsed) + await asyncio.sleep(sleep_time) + + except WebSocketDisconnect: + pass + except Exception as e: + print(f"[Client {client_id}] Frame sender error: {e}") + + async def stream_to_client(self, websocket: WebSocket): + """Stream video frames to a WebSocket client with bidirectional communication.""" + self.clients += 1 + client_id = self.clients + print(f"[Client {client_id}] Connected to WebSocket stream") + + try: + # Run frame streaming and message handling concurrently + await asyncio.gather( + self.stream_frames(websocket, client_id), + self.handle_client_messages(websocket, client_id) + ) + + except WebSocketDisconnect: + print(f"[Client {client_id}] Disconnected normally") + except Exception as e: + print(f"[Client {client_id}] Disconnected: {e}") + finally: + print(f"[Client {client_id}] Stream ended") + + +def create_app(pattern: VideoTestPattern, quality: int = 85) -> FastAPI: + """Create FastAPI application.""" + app = FastAPI( + title="QGroundControl WebSocket Video Test Server", + description="Streams test video pattern via WebSocket for QGC testing" + ) + + streamer = WebSocketStreamer(pattern, quality) + + @app.get("/") + async def root(): + """Information endpoint.""" + return { + "service": "QGroundControl WebSocket Video Test Server", + "websocket_endpoint": "/ws/video_feed", + "resolution": f"{pattern.width}x{pattern.height}", + "fps": pattern.fps, + "quality": quality, + "usage": f"ws://{app.state.host}:{app.state.port}/ws/video_feed" + } + + @app.get("/test", response_class=HTMLResponse) + async def test_page(): + """Simple HTML test page to view the WebSocket stream in browser.""" + return f""" + + + + QGC WebSocket Video Test + + + +

+        <h1>QGroundControl WebSocket Video Test</h1>
+        <img id="video" width="{pattern.width}" height="{pattern.height}" alt="video stream" />
+        <div id="status">Status: Connecting...</div>
+        <script>
+            // Minimal viewer: per frame the server sends JSON metadata (text) then JPEG bytes (binary)
+            const statusEl = document.getElementById("status");
+            const videoEl = document.getElementById("video");
+            const ws = new WebSocket(`ws://${{window.location.host}}/ws/video_feed`);
+            ws.binaryType = "blob";
+            ws.onopen = () => {{ statusEl.textContent = "Status: Connected"; }};
+            ws.onclose = () => {{ statusEl.textContent = "Status: Disconnected"; }};
+            ws.onmessage = (event) => {{
+                if (typeof event.data === "string") {{ return; }}  // JSON frame metadata, ignored here
+                const url = URL.createObjectURL(event.data);       // binary JPEG frame
+                videoEl.onload = () => URL.revokeObjectURL(url);
+                videoEl.src = url;
+            }};
+        </script>
+ + + + + """ + + @app.websocket("/ws/video_feed") + async def websocket_endpoint(websocket: WebSocket): + """WebSocket video stream endpoint.""" + client_info = f"{websocket.client.host}:{websocket.client.port}" if websocket.client else "unknown" + print(f"\n{'='*70}") + print(f"WebSocket connection attempt from: {client_info}") + print(f"Endpoint: /ws/video_feed") + print(f"{'='*70}\n") + + await websocket.accept() + print(f"✓ WebSocket connection accepted from {client_info}\n") + + await streamer.stream_to_client(websocket) + + return app + + +def main(): + parser = argparse.ArgumentParser( + description="WebSocket Video Streaming Test Server for QGroundControl" + ) + parser.add_argument( + "--host", + default="127.0.0.1", + help="Host address to bind to (default: 127.0.0.1)" + ) + parser.add_argument( + "--port", + type=int, + default=5077, + help="Port to bind to (default: 5077)" + ) + parser.add_argument( + "--width", + type=int, + default=640, + help="Video width in pixels (default: 640)" + ) + parser.add_argument( + "--height", + type=int, + default=480, + help="Video height in pixels (default: 480)" + ) + parser.add_argument( + "--fps", + type=int, + default=30, + help="Frames per second (default: 30)" + ) + parser.add_argument( + "--quality", + type=int, + default=85, + help="JPEG quality 0-100 (default: 85)" + ) + + args = parser.parse_args() + + # Create test pattern generator + pattern = VideoTestPattern(args.width, args.height, args.fps) + + # Create FastAPI app + app = create_app(pattern, args.quality) + app.state.host = args.host + app.state.port = args.port + + print("=" * 70) + print("QGroundControl WebSocket Video Test Server") + print("=" * 70) + print(f"WebSocket URL: ws://{args.host}:{args.port}/ws/video_feed") + print(f"Info URL: http://{args.host}:{args.port}/") + print(f"Test Page: http://{args.host}:{args.port}/test") + print(f"Resolution: {args.width}x{args.height}") + print(f"Frame Rate: {args.fps} FPS") + print(f"Quality: {args.quality}%") + print("=" * 70) + print("\nConfiguring QGroundControl:") + print(" 1. Open Settings → Video") + print(" 2. Set 'Video Source' to 'WebSocket Video Stream'") + print(f" 3. Set 'URL' to: ws://{args.host}:{args.port}/ws/video_feed") + print(" 4. 
Click 'Apply' and view video in the main display") + print("\nYou can also test in browser:") + print(f" Open: http://{args.host}:{args.port}/test") + print("\nPress Ctrl+C to stop the server") + print("=" * 70) + + # Run server + uvicorn.run(app, host=args.host, port=args.port, log_level="info") + + +if __name__ == "__main__": + main() diff --git a/tools/setup/install-qt-debian.sh b/tools/setup/install-qt-debian.sh index e46119629b85..86bff45ddce9 100755 --- a/tools/setup/install-qt-debian.sh +++ b/tools/setup/install-qt-debian.sh @@ -9,7 +9,7 @@ QT_TARGET="${QT_TARGET:-desktop}" QT_ARCH="${QT_ARCH:-linux_gcc_64}" QT_ARCH_DIR="${QT_ARCH_DIR:-gcc_64}" QT_ROOT_DIR="${QT_ROOT_DIR:-${QT_PATH}/${QT_VERSION}/${QT_ARCH_DIR}}" -QT_MODULES="${QT_MODULES:-qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml}" +QT_MODULES="${QT_MODULES:-qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets}" echo "QT_VERSION $QT_VERSION" echo "QT_PATH $QT_PATH" diff --git a/tools/setup/install-qt-macos.sh b/tools/setup/install-qt-macos.sh index 210136b51736..44c432d78f38 100755 --- a/tools/setup/install-qt-macos.sh +++ b/tools/setup/install-qt-macos.sh @@ -6,7 +6,7 @@ QT_PATH="${QT_PATH:-/opt/Qt}" QT_HOST="${QT_HOST:-mac}" QT_TARGET="${QT_TARGET:-desktop}" QT_ARCH="${QT_ARCH:-mac}" -QT_MODULES="${QT_MODULES:-qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml}" +QT_MODULES="${QT_MODULES:-qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets}" set -e diff --git a/tools/setup/install-qt-windows.ps1 b/tools/setup/install-qt-windows.ps1 index 833514684e03..c649f4b54133 100644 --- a/tools/setup/install-qt-windows.ps1 +++ b/tools/setup/install-qt-windows.ps1 @@ -17,7 +17,7 @@ $QT_HOST = $env:QT_HOST -or 'windows' $QT_TARGET = $env:QT_TARGET -or 'desktop' # Windows arch must be one of: win64_msvc2017_64, win64_msvc2019_64, win64_mingw81, etc. :contentReference[oaicite:0]{index=0} $QT_ARCH = $env:QT_ARCH -or 'win64_msvc2022_64' -$QT_MODULES = $env:QT_MODULES -or 'qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml' +$QT_MODULES = $env:QT_MODULES -or 'qtcharts qtlocation qtpositioning qtspeech qt5compat qtmultimedia qtserialport qtimageformats qtshadertools qtconnectivity qtquick3d qtsensors qtscxml qtwebsockets' Write-Host "Using:" Write-Host " QT_VERSION = $QT_VERSION"