diff --git a/android/src/main/cpp/iris_rtc_rendering_android.cc b/android/src/main/cpp/iris_rtc_rendering_android.cc index adaf4133f..bd92f045d 100644 --- a/android/src/main/cpp/iris_rtc_rendering_android.cc +++ b/android/src/main/cpp/iris_rtc_rendering_android.cc @@ -12,6 +12,7 @@ #include #include #include +#include namespace agora { namespace iris { @@ -508,6 +509,10 @@ class YUVRendering final : public RenderingOp { uTextureLoc_ = glGetUniformLocation(program, "uTexture"); vTextureLoc_ = glGetUniformLocation(program, "vTexture"); + // Get locations for ColorSpace uniforms + colorMatrixLoc_ = glGetUniformLocation(program, "uColorMatrix"); + rangeLoc_ = glGetUniformLocation(program, "uRange"); + glGenTextures(3, texs_); CHECK_GL_ERROR() } @@ -618,12 +623,88 @@ class YUVRendering final : public RenderingOp { GL_LUMINANCE, GL_UNSIGNED_BYTE, vBuffer); CHECK_GL_ERROR() + if (colorMatrixLoc_ != -1) { + // BT.601 Full Range + float bt601_full[9] = { + 1.0f, 1.0f, 1.0f, + 0.0f, -0.344136f, 1.772f, + 1.402f, -0.714136f, 0.0f + }; + + // BT.601 Limited Range + float bt601_limit[9] = { + 1.164384f, 1.164384f, 1.164384f, + 0.0f, -0.391762f, 2.017232f, + 1.596027f, -0.812968f, 0.0f + }; + + // BT.709 Full Range + float bt709_full[9] = { + 1.0f, 1.0f, 1.0f, + 0.0f, -0.187324f, 1.8556f, + 1.5748f, -0.468124f, 0.0f + }; + + // BT.709 Limited Range + float bt709_limit[9] = { + 1.164384f, 1.164384f, 1.164384f, + 0.0f, -0.213249f, 2.112402f, + 1.792741f, -0.532909f, 0.0f + }; + + // BT.2020 Full Range + float bt2020_full[9] = { + 1.0f, 1.0f, 1.0f, + 0.0f, -0.164553f, 1.8814f, + 1.4746f, -0.571353f, 0.0f + }; + + // BT.2020 Limited Range + float bt2020_limit[9] = { + 1.167808f, 1.167808f, 1.167808f, + 0.0f, -0.187877f, 2.148072f, + 1.683611f, -0.652337f, 0.0f + }; + + float mat[9]; + int matrixId = (int)video_frame->colorSpace.matrix; + int rangeId = (int)video_frame->colorSpace.range; + + bool isFullRange = (rangeId == agora::media::base::ColorSpace::RANGEID_FULL); + + // Select 
matrix based on colorSpace.matrix and range + switch (matrixId) { + case agora::media::base::ColorSpace::MATRIXID_SMPTE170M: + case agora::media::base::ColorSpace::MATRIXID_BT470BG: + memcpy(mat, isFullRange ? bt601_full : bt601_limit, sizeof(mat)); + break; + case agora::media::base::ColorSpace::MATRIXID_BT709: + memcpy(mat, isFullRange ? bt709_full : bt709_limit, sizeof(mat)); + break; + case agora::media::base::ColorSpace::MATRIXID_BT2020_NCL: + case agora::media::base::ColorSpace::MATRIXID_BT2020_CL: + memcpy(mat, bt2020_full, sizeof(mat)); + break; + default: + memcpy(mat, isFullRange ? bt709_full : bt709_limit, sizeof(mat)); + break; + } + glUniformMatrix3fv(colorMatrixLoc_, 1, GL_FALSE, mat); + } + + if (rangeLoc_ != -1) { + int rangeId = (int)video_frame->colorSpace.range; + bool isFullRange = (rangeId == agora::media::base::ColorSpace::RANGEID_FULL); + int rangeVal = isFullRange ? 1 : 0; + glUniform1i(rangeLoc_, rangeVal); + + } + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); CHECK_GL_ERROR() gl_context_->Swap(); - // Clean up glDisableVertexAttribArray(aPositionLoc_); CHECK_GL_ERROR() glDisableVertexAttribArray(texCoordLoc_); @@ -642,8 +723,8 @@ class YUVRendering final : public RenderingOp { "attribute vec2 aTextCoord;\n" "varying vec2 vTextCoord;\n" "void main() {\n" - " vTextCoord = vec2(aTextCoord.x, 1.0 - aTextCoord.y);\n" - " gl_Position = aPosition;\n" + " gl_Position = aPosition;\n" + " vTextCoord = aTextCoord;\n" "}\n"; const char *frag_shader_yuv_ = @@ -652,15 +733,26 @@ class YUVRendering final : public RenderingOp { "uniform sampler2D yTexture;\n" "uniform sampler2D uTexture;\n" "uniform sampler2D vTexture;\n" + "uniform mat3 uColorMatrix;\n" + "uniform int uRange;\n" + "\n" "void main() {\n" - " vec3 yuv;\n" - " vec3 rgb;\n" - " yuv.r = texture2D(yTexture, vTextCoord).r;\n" - " yuv.g = texture2D(uTexture, vTextCoord).r - 0.5;\n" - " yuv.b = texture2D(vTexture, vTextCoord).r - 0.5;\n" - " rgb = mat3(1.0, 1.0, 1.0, 0.0, -0.39465, 2.03211, 1.13983, " - 
"-0.58060, 0.0) * yuv;\n" - " gl_FragColor = vec4(rgb, 1.0);\n" + " float y = texture2D(yTexture, vTextCoord).r;\n" + " float u = texture2D(uTexture, vTextCoord).r;\n" + " float v = texture2D(vTexture, vTextCoord).r;\n" + "\n" + " vec3 yuv;\n" + " if (uRange == 0) { // LIMITED: apply 0.0627 offset\n" + " yuv[0] = clamp(y, 0.0, 1.0) - 0.0627;\n" + " } else { // FULL: no offset\n" + " yuv[0] = clamp(y, 0.0, 1.0);\n" + " }\n" + " yuv[1] = clamp(u - 0.5, -0.5, 0.5);\n" + " yuv[2] = clamp(v - 0.5, -0.5, 0.5);\n" + "\n" + " vec3 rgb = uColorMatrix * yuv;\n" + "\n" + " gl_FragColor = vec4(clamp(rgb, 0.0, 1.0), 1.0);\n" "}\n"; // clang-format off @@ -680,6 +772,8 @@ class YUVRendering final : public RenderingOp { GLint yTextureLoc_; GLint uTextureLoc_; GLint vTextureLoc_; + GLint colorMatrixLoc_ = -1; + GLint rangeLoc_ = -1; std::unique_ptr shader_; }; diff --git a/example/android/app/build.gradle b/example/android/app/build.gradle index fd7ed07e6..0366e4536 100644 --- a/example/android/app/build.gradle +++ b/example/android/app/build.gradle @@ -46,7 +46,7 @@ android { defaultConfig { // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html). 
applicationId "io.agora.agora_rtc_ng_example" - minSdkVersion 21 + minSdkVersion flutter.minSdkVersion targetSdkVersion flutter.targetSdkVersion versionCode flutterVersionCode.toInteger() versionName flutterVersionName diff --git a/example/lib/examples/advanced/push_video_frame/push_video_frame.dart b/example/lib/examples/advanced/push_video_frame/push_video_frame.dart index b8d45b42e..61c52e66b 100644 --- a/example/lib/examples/advanced/push_video_frame/push_video_frame.dart +++ b/example/lib/examples/advanced/push_video_frame/push_video_frame.dart @@ -22,6 +22,7 @@ class PushVideoFrame extends StatefulWidget { class _State extends State { late final RtcEngine _engine; bool _isReadyPreview = false; + bool _isUseFlutterTexture = true; bool isJoined = false, switchCamera = true, switchRender = true; Set remoteUid = {}; @@ -140,7 +141,7 @@ class _State extends State { image.dispose(); } - Future _pushVideoFrame() async { + Future _pushVideoFrame({ColorSpace? colorSpace}) async { VideoPixelFormat format = VideoPixelFormat.videoPixelRgba; if (kIsWeb) { // TODO(littlegnal): https://github.com/flutter/flutter/issues/135409 @@ -149,67 +150,496 @@ class _State extends State { format = VideoPixelFormat.videoPixelBgra; } await _engine.getMediaEngine().pushVideoFrame( - frame: ExternalVideoFrame( - type: VideoBufferType.videoBufferRawData, - format: format, - buffer: _imageByteData, - stride: _imageWidth, - height: _imageHeight, - timestamp: DateTime.now().millisecondsSinceEpoch)); + frame: ExternalVideoFrame( + type: VideoBufferType.videoBufferRawData, + format: format, + buffer: _imageByteData, + stride: _imageWidth, + height: _imageHeight, + colorSpace: colorSpace, + timestamp: DateTime.now().millisecondsSinceEpoch, + )); } @override Widget build(BuildContext context) { - return ExampleActionsWidget( - displayContentBuilder: (context, isLayoutHorizontal) { - if (!_isReadyPreview) return Container(); - return AgoraVideoView( - controller: VideoViewController( - rtcEngine: 
_engine, - canvas: const VideoCanvas( - uid: 0, - sourceType: VideoSourceType.videoSourceCustom, - ), - ), - ); - }, - actionsBuilder: (context, isLayoutHorizontal) { - return Column( - mainAxisAlignment: MainAxisAlignment.start, - crossAxisAlignment: CrossAxisAlignment.start, - mainAxisSize: MainAxisSize.min, - children: [ - TextField( - controller: _controller, - decoration: const InputDecoration(hintText: 'Channel ID'), - ), - Row( - children: [ - Expanded( - flex: 1, - child: ElevatedButton( + return Stack( + children: [ + ExampleActionsWidget( + displayContentBuilder: (context, isLayoutHorizontal) { + if (!_isReadyPreview) return Container(); + return AgoraVideoView( + controller: VideoViewController( + rtcEngine: _engine, + useFlutterTexture: _isUseFlutterTexture, + canvas: const VideoCanvas( + uid: 0, + sourceType: VideoSourceType.videoSourceCustom, + ), + ), + ); + }, + // actionsBuilder: (context, isLayoutHorizontal) { + // return Column( + // mainAxisAlignment: MainAxisAlignment.start, + // crossAxisAlignment: CrossAxisAlignment.start, + // mainAxisSize: MainAxisSize.min, + // children: [ + // TextField( + // controller: _controller, + // decoration: const InputDecoration(hintText: 'Channel ID'), + // ), + // Row( + // children: [ + // Expanded( + // flex: 1, + // child: ElevatedButton( + // onPressed: isJoined ? _leaveChannel : _joinChannel, + // child: Text('${isJoined ? 'Leave' : 'Join'} channel'), + // ), + // ), + // const SizedBox(width: 10), + // ElevatedButton( + // onPressed: () => setState(() => _isUseFlutterTexture = !_isUseFlutterTexture), + // child: Text('Use Flutter Texture: $_isUseFlutterTexture'), + // ), + // ], + // ), + + // const SizedBox( + // height: 20, + // ), + // SizedBox( + // height: 100, + // width: 100, + // child: Image.asset('assets/agora-logo.png'), + // ), + // const Text('Push Image as Video Frame'), + // Column( + // children: [ + // ElevatedButton( + // onPressed: + // isJoined ? 
() => _pushVideoFrame(colorSpace: null) : null, + // child: const Text('Push Video Frame'), + // ), + // ElevatedButton( + // onPressed: isJoined + // ? () => _pushVideoFrame( + // colorSpace: const ColorSpace( + // primaries: PrimaryID.primaryidBt709, + // transfer: TransferID.transferidBt709, + // matrix: MatrixID.matrixidBt709, + // range: RangeID.rangeidFull, + // )) + // : null, + // child: const Text('Push Video Frame With Color Space'), + // ), + // ], + // ), + // ], + // ); + // }, + ), + Positioned( + bottom: 20, + left: 0, + right: 0, + child: Column( + children: [ + Row( + children: [ + ElevatedButton( onPressed: isJoined ? _leaveChannel : _joinChannel, child: Text('${isJoined ? 'Leave' : 'Join'} channel'), ), - ) - ], - ), - const SizedBox( - height: 20, - ), - SizedBox( - height: 100, - width: 100, - child: Image.asset('assets/agora-logo.png'), - ), - const Text('Push Image as Video Frame'), - ElevatedButton( - onPressed: isJoined ? _pushVideoFrame : null, - child: const Text('Push Video Frame'), - ), - ], - ); - }, + ElevatedButton( + onPressed: () => setState( + () => _isUseFlutterTexture = !_isUseFlutterTexture), + child: Text('Use Flutter Texture: $_isUseFlutterTexture'), + ), + ], + ), + SingleChildScrollView( + scrollDirection: Axis.horizontal, + child: Row( + mainAxisSize: MainAxisSize.min, + children: [ + ElevatedButton( + onPressed: () => _pushTestFrameWithColorSpace( + matrix: MatrixID.matrixidBt709, + range: RangeID.rangeidFull, + ), + child: const Text('BT.709'), + style: ElevatedButton.styleFrom( + backgroundColor: Colors.blue), + ), + ElevatedButton( + onPressed: () => _pushTestFrameWithColorSpace( + matrix: MatrixID.matrixidBt2020Ncl, + range: RangeID.rangeidFull, + ), + child: const Text('BT.2020'), + style: ElevatedButton.styleFrom( + backgroundColor: Colors.green), + ), + ElevatedButton( + onPressed: () => _pushTestFrameWithColorSpace( + matrix: MatrixID.matrixidSmpte170m, + range: RangeID.rangeidFull, + ), + child: const 
Text('SMPTE170M'), + style: ElevatedButton.styleFrom( + backgroundColor: Colors.orange), + ), + ElevatedButton( + onPressed: () => _pushTestFrameWithColorSpace( + matrix: MatrixID.matrixidBt709, + range: RangeID.rangeidLimited, + ), + child: const Text('Limited Range'), + style: ElevatedButton.styleFrom( + backgroundColor: Colors.purple), + ), + ], + ), + ), + ], + ), + ), + ], ); } + + // Add this method to your State class + void _pushTestFrameWithColorSpace({ + required MatrixID matrix, + required RangeID range, + }) { + if (isJoined) { + print( + 'Preparing to push color space test frame: Matrix=$matrix, Range=$range'); + + // 1. Generate more significant RGBA color block image data (for testing) + final rgbaData = + _generateEnhancedColorBarFrame(matrix: matrix, range: range); + + // 2. Create ExternalVideoFrame (use RGBA format with more significant color differences) + final frame = ExternalVideoFrame( + type: VideoBufferType.videoBufferRawData, + format: kIsWeb + ? VideoPixelFormat.videoPixelBgra + : VideoPixelFormat.videoPixelRgba, + buffer: rgbaData['buffer'], + stride: rgbaData['width'], + height: rgbaData['height'], + timestamp: DateTime.now().millisecondsSinceEpoch, + // 3. Apply your selected ColorSpace + colorSpace: ColorSpace( + primaries: + PrimaryID.primaryidBt709, // Fixed use of BT.709 as baseline + transfer: TransferID.transferidBt709, + matrix: matrix, + range: range, + ), + ); + + // 4. Push video frame + _engine.getMediaEngine().pushVideoFrame(frame: frame); + + print('Push video frame completed - RGBA test frame'); + print(' Format: ${kIsWeb ? 
"BGRA" : "RGBA"}'); + print(' Size: ${rgbaData['width']}x${rgbaData['height']}'); + print(' Matrix: $matrix'); + print(' Range: $range'); + print(' Expected effect: ${_getExpectedEffect(matrix, range)}'); + } else { + print('Please join the channel first'); + } + } + + String _getExpectedEffect(MatrixID matrix, RangeID range) { + String matrixEffect = ""; + switch (matrix) { + case MatrixID.matrixidBt709: + matrixEffect = "Blue enhanced"; + break; + case MatrixID.matrixidBt2020Ncl: + matrixEffect = "Green enhanced"; + break; + case MatrixID.matrixidSmpte170m: + matrixEffect = "Red enhanced"; + break; + default: + matrixEffect = "Default"; + } + + String rangeEffect = + range == RangeID.rangeidLimited ? "Brightness offset" : "Full range"; + return "$matrixEffect + $rangeEffect"; + } + + // Generate enhanced RGBA color block image (320x240, specifically for testing ColorSpace, adapted for mobile screens) + Map _generateEnhancedColorBarFrame({ + required MatrixID matrix, + required RangeID range, + }) { + const int width = 240; // Adjust to a size more suitable for mobile devices + const int height = 360; + const int bytesPerPixel = 4; // RGBA + final buffer = Uint8List(width * height * bytesPerPixel); + + // Create a more complex test pattern with various colors + for (int y = 0; y < height; y++) { + for (int x = 0; x < width; x++) { + final int offset = (y * width + x) * bytesPerPixel; + int r = 0, g = 0, b = 0; + + // Upper half: 8 vertical bars of basic colors + if (y < height / 2) { + int section = x ~/ (width / 8); + switch (section) { + case 0: + r = 255; + g = 255; + b = 255; + break; // White + case 1: + r = 255; + g = 255; + b = 0; + break; // Yellow + case 2: + r = 0; + g = 255; + b = 255; + break; // Cyan + case 3: + r = 0; + g = 255; + b = 0; + break; // Green + case 4: + r = 255; + g = 0; + b = 255; + break; // Magenta + case 5: + r = 255; + g = 0; + b = 0; + break; // Red + case 6: + r = 0; + g = 0; + b = 255; + break; // Blue + case 7: + r = 0; + g = 
0; + b = 0; + break; // Black + } + } + // Lower half: gradient colors, easier to see color space differences + else { + int section = x ~/ (width / 4); + switch (section) { + case 0: // Red gradient + r = (x % (width / 4)) * 255 ~/ (width / 4); + g = 0; + b = 0; + break; + case 1: // Green gradient + r = 0; + g = (x % (width / 4)) * 255 ~/ (width / 4); + b = 0; + break; + case 2: // Blue gradient + r = 0; + g = 0; + b = (x % (width / 4)) * 255 ~/ (width / 4); + break; + case 3: // Grayscale gradient + int gray = (x % (width / 4)) * 255 ~/ (width / 4); + r = gray; + g = gray; + b = gray; + break; + } + } + + // Handle byte order according to different formats + if (kIsWeb) { + // Web platform uses BGRA format + buffer[offset] = b.toUnsigned(8); // B + buffer[offset + 1] = g.toUnsigned(8); // G + buffer[offset + 2] = r.toUnsigned(8); // R + buffer[offset + 3] = 255; // A + } else { + // Other platforms use RGBA format + buffer[offset] = r.toUnsigned(8); // R + buffer[offset + 1] = g.toUnsigned(8); // G + buffer[offset + 2] = b.toUnsigned(8); // B + buffer[offset + 3] = 255; // A + } + } + } + + // Draw text information on the image + String matrixName = matrix.toString().split('.').last; + String rangeName = range.toString().split('.').last; + // _drawTextOnBuffer(buffer, width, height, '$matrixName-$rangeName'); + + return { + 'width': width, + 'height': height, + 'buffer': buffer, + }; + } + + // Draw text on RGBA buffer + void _drawTextOnBuffer(Uint8List buffer, int width, int height, String text) { + // Text position: top center, leave some margin + const int textY = 150; + const int textHeight = 24; + const int textStartX = 50; + + // Create text background (black semi-transparent rectangle) + for (int y = textY - 5; y < textY + textHeight + 5; y++) { + for (int x = textStartX - 10; + x < width - textStartX + 10 && x < width; + x++) { + if (y >= 0 && y < height && x >= 0) { + final int offset = (y * width + x) * 4; + if (offset + 3 < buffer.length) { + 
buffer[offset] = 0; // R + buffer[offset + 1] = 0; // G + buffer[offset + 2] = 0; // B + buffer[offset + 3] = 180; // A (semi-transparent) + } + } + } + } + + // Simple pixel font drawing function + _drawSimpleText(buffer, width, height, text, textStartX, textY); + } + + // Simple pixel font drawing function + void _drawSimpleText(Uint8List buffer, int width, int height, String text, + int startX, int startY) { + const int charWidth = 8; + const int charHeight = 12; + + for (int i = 0; i < text.length; i++) { + final int charX = startX + i * charWidth; + if (charX + charWidth > width) break; + + _drawChar( + buffer, width, height, text[i], charX, startY, charWidth, charHeight); + } + } + + // Draw single character (simplified version, only draws some basic shapes) + void _drawChar(Uint8List buffer, int width, int height, String char, int x, + int y, int charWidth, int charHeight) { + // For simplicity, we draw character outlines with white pixels + // This is a very simplified implementation, more complex font rendering can be used in actual applications + + for (int dy = 0; dy < charHeight; dy++) { + for (int dx = 0; dx < charWidth; dx++) { + bool shouldDraw = false; + + // Simple character shape definitions (only implement a few commonly used characters and symbols) + switch (char) { + case 'M': + shouldDraw = (dx == 0 || + dx == charWidth - 1 || + (dy < charHeight / 2 && + (dx == charWidth / 4 || dx == 3 * charWidth / 4))); + break; + case 'a': + shouldDraw = (dy > charHeight / 2 && + (dx == 0 || + dx == charWidth - 1 || + dy == charHeight - 1 || + dy == charHeight * 2 / 3)); + break; + case 't': + shouldDraw = (dx == charWidth / 2 || + (dy == charHeight / 3 && dx < 3 * charWidth / 4)); + break; + case 'r': + shouldDraw = (dx == 0 || + (dy > charHeight / 2 && + dy < 2 * charHeight / 3 && + dx < charWidth / 2)); + break; + case 'i': + shouldDraw = + (dx == charWidth / 2 || (dy == 0 && dx == charWidth / 2)); + break; + case 'x': + shouldDraw = ((dx == 0 && dy == 
charHeight - 1) || + (dx == charWidth - 1 && dy == 0) || + dx == dy); + break; + case ':': + shouldDraw = (dx == charWidth / 2 && + (dy == charHeight / 3 || dy == 2 * charHeight / 3)); + break; + case ' ': + shouldDraw = false; + break; + case ',': + shouldDraw = (dx == charWidth / 2 && dy == charHeight - 2); + break; + case 'R': + shouldDraw = (dx == 0 || + (dy < charHeight / 2 && dx == charWidth - 1) || + dy == 0 || + dy == charHeight / 2); + break; + case 'n': + shouldDraw = (dx == 0 || + (dy > charHeight / 2 && dx == charWidth - 1) || + (dy == charHeight / 2 && dx < charWidth - 1)); + break; + case 'g': + shouldDraw = ((dy > charHeight / 3) && + (dx == 0 || + dx == charWidth - 1 || + dy == charHeight - 1 || + (dy == 2 * charHeight / 3 && dx < charWidth - 1))); + break; + case 'e': + shouldDraw = (dx == 0 || + dy == charHeight / 2 || + dy == 0 || + dy == charHeight - 1); + break; + default: + // For undefined characters, draw a simple rectangle + shouldDraw = (dx == 0 || + dx == charWidth - 1 || + dy == 0 || + dy == charHeight - 1); + } + + if (shouldDraw) { + final int pixelX = x + dx; + final int pixelY = y + dy; + if (pixelX >= 0 && pixelX < width && pixelY >= 0 && pixelY < height) { + final int offset = (pixelY * width + pixelX) * 4; + if (offset + 3 < buffer.length) { + buffer[offset] = 255; // R (white) + buffer[offset + 1] = 255; // G + buffer[offset + 2] = 255; // B + buffer[offset + 3] = 255; // A + } + } + } + } + } + } } diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index b9828c980..c80f875f7 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -21,7 +21,8 @@ Pod::Spec.new do |s| plugin_dev_path = File.join(File.dirname(File.realpath(__FILE__)), '.plugin_dev') if File.exist?(plugin_dev_path) puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' 
- s.vendored_frameworks = 'libs/*.xcframework' + s.vendored_frameworks = 'libs/*.xcframework', 'libs/*.framework' + s.dependency 'AgoraRtcEngine_iOS', '4.6.0' else # iris dependencies start s.dependency 'AgoraIrisRTC_iOS', '4.5.2.3-build.1' diff --git a/macos/agora_rtc_engine.podspec b/macos/agora_rtc_engine.podspec index 329691ae1..e5e6a58a4 100644 --- a/macos/agora_rtc_engine.podspec +++ b/macos/agora_rtc_engine.podspec @@ -20,6 +20,7 @@ A new flutter plugin project. if File.exist?(plugin_dev_path) puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' s.vendored_frameworks = 'libs/*.xcframework', 'libs/*.framework' + s.dependency 'AgoraRtcEngine_macOS', '4.6.0' else # iris dependencies start s.dependency 'AgoraIrisRTC_macOS', '4.5.2.2-build.3' diff --git a/shared/darwin/AgoraCVPixelBufferUtils.h b/shared/darwin/AgoraCVPixelBufferUtils.h index 4da4101a1..9a25eee46 100644 --- a/shared/darwin/AgoraCVPixelBufferUtils.h +++ b/shared/darwin/AgoraCVPixelBufferUtils.h @@ -23,6 +23,14 @@ + (CVPixelBufferRef _Nullable)copyCVPixelBuffer: (CVPixelBufferRef _Nonnull)sourcePixelBuffer; +/** + * Copies all attachments from source CVPixelBuffer to destination CVPixelBuffer. 
+ * + * @param sourcePixelBuffer The source CVPixelBuffer to copy attachments from + * @param destPixelBuffer The destination CVPixelBuffer to copy attachments to + */ ++ (void)copyPixelBufferAttachments:(CVPixelBufferRef _Nonnull)sourcePixelBuffer + to:(CVPixelBufferRef _Nonnull)destPixelBuffer; #if defined(TARGET_OS_OSX) && TARGET_OS_OSX /** diff --git a/shared/darwin/AgoraCVPixelBufferUtils.mm b/shared/darwin/AgoraCVPixelBufferUtils.mm index 79a9ca936..01510e1c3 100644 --- a/shared/darwin/AgoraCVPixelBufferUtils.mm +++ b/shared/darwin/AgoraCVPixelBufferUtils.mm @@ -298,10 +298,34 @@ + (CVPixelBufferRef)copyCVPixelBuffer: CVPixelBufferRelease(destPixelBuffer); return nil; } + + // Copy all CVPixelBuffer attachments, including color space related properties + [self copyPixelBufferAttachments:sourcePixelBuffer to:destPixelBuffer]; return destPixelBuffer; } ++ (void)copyPixelBufferAttachments:(CVPixelBufferRef)sourcePixelBuffer + to:(CVPixelBufferRef)destPixelBuffer { + if (!sourcePixelBuffer || !destPixelBuffer) { + return; + } + + // Get all attachments of source CVPixelBuffer + CFDictionaryRef attachments = CVBufferGetAttachments(sourcePixelBuffer, kCVAttachmentMode_ShouldPropagate); + + if (attachments) { + // Copy all attachments to destination CVPixelBuffer + CFDictionaryRef copiedAttachments = CFDictionaryCreateCopy(kCFAllocatorDefault, attachments); + CVBufferSetAttachments(destPixelBuffer, copiedAttachments, kCVAttachmentMode_ShouldPropagate); + CFRelease(copiedAttachments); + + NSLog(@"Copied CVPixelBuffer attachments: %@", (__bridge NSDictionary*)attachments); + } else { + NSLog(@"No attachments found on source CVPixelBuffer"); + } +} + #if defined(TARGET_OS_OSX) && TARGET_OS_OSX + (BOOL)saveCVPixelBufferToFile:(CVPixelBufferRef)pixelBuffer name:(NSString *)name { diff --git a/shared/darwin/TextureRenderer.mm b/shared/darwin/TextureRenderer.mm index 9946a327d..7ebac8945 100644 --- a/shared/darwin/TextureRenderer.mm +++ b/shared/darwin/TextureRenderer.mm 
@@ -7,12 +7,96 @@ #import #import #import +#import #import #import using namespace agora::iris; +static const float g_color601_full[9] = { + 1.0f, 1.0f, 1.0f, + 0.000000f, -0.344136f, 1.772000f, + 1.402000f, -0.714136f, 0.00000f +}; + +static const float g_color601_limit[9] = { + 1.164384f, 1.164384f, 1.164384f, + 0.000000f, -0.391762f, 2.017232f, + 1.596027f, -0.812968f, 0.000000f +}; + +static const float g_color709_full[9] = { + 1.0f, 1.0f, 1.0f, + 0.000000f, -0.187324f, 1.855600f, + 1.574800f, -0.468124f, 0.00000f +}; + +static const float g_color709_limit[9] = { + 1.164384f, 1.164384f, 1.164384f, + 0.000000f, -0.213249f, 2.112402f, + 1.792741f, -0.532909f, 0.000000f +}; + +static const float g_color2020_full[9] = { + 1.0f, 1.0f, 1.0f, + 0.000000f, -0.164553f, 1.881400f, + 1.474600f, -0.571353f, 0.00000f +}; + +static const float g_color2020_limit[9] = { + 1.167808f, 1.167808f, 1.167808f, + 0.000000f, -0.187877f, 2.148072f, + 1.683611f, -0.652337f, 0.000000f +}; + +static const char* kColorSpaceShaderSrc = R"( +#include +#include +using namespace metal; + +kernel void processYUVColorSpace(texture2d yTexture [[texture(0)]], + texture2d uvTexture [[texture(1)]], + texture2d outputTexture [[texture(2)]], + constant float* colorMatrix [[buffer(0)]], + constant int& isFullRange [[buffer(1)]], + uint2 gid [[thread_position_in_grid]]) { + + if (gid.x >= yTexture.get_width() || gid.y >= yTexture.get_height()) { + return; + } + + // Read YUV data + float y = yTexture.read(gid).r; + float2 uv = uvTexture.read(gid / 2).rg; + + // Process YUV values according to GlGenericDrawer.java logic + float3 yuv; + if (isFullRange == 0) { + yuv.x = clamp(y, 0.0, 1.0) - 0.0627; // 0.0627 = 16.0/255.0 + yuv.y = clamp(uv.x - 0.5, -0.5, 0.5); + yuv.z = clamp(uv.y - 0.5, -0.5, 0.5); + } else { // Full Range + yuv.x = clamp(y, 0.0, 1.0); + yuv.y = clamp(uv.x - 0.5, -0.5, 0.5); + yuv.z = clamp(uv.y - 0.5, -0.5, 0.5); + } + + // Use color matrix for YUV to RGB conversion + float3x3 matrix = 
float3x3( + float3(colorMatrix[0], colorMatrix[1], colorMatrix[2]), + float3(colorMatrix[3], colorMatrix[4], colorMatrix[5]), + float3(colorMatrix[6], colorMatrix[7], colorMatrix[8]) + ); + + float3 rgb = matrix * yuv; + rgb = clamp(rgb, 0.0, 1.0); + + // Output as RGBA + outputTexture.write(float4(rgb, 1.0), gid); +} +)"; + @interface TextureRender () @property(nonatomic, weak) NSObject *textureRegistry; @@ -27,9 +111,27 @@ @interface TextureRender () /// The queue on which `latestPixelBuffer` property is accessed. @property(strong, nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue; +// Metal related properties +@property(nonatomic, strong) id metalDevice; +@property(nonatomic, strong) id commandQueue; +@property(nonatomic, strong) id colorSpacePipelineState; +@property(nonatomic, strong) id colorMatrixBuffer; +@property(nonatomic) CVMetalTextureCacheRef textureCache; + +// Store current color space information +@property(nonatomic, assign) agora::media::base::ColorSpace currentColorSpace; +@property(nonatomic, assign) BOOL hasValidColorSpace; + +// ColorSpace processing methods +- (void)setupColorSpaceProcessing; +- (const float*)getColorMatrixForColorSpace:(const agora::media::base::ColorSpace&)colorSpace; +- (void)processColorSpace:(CVPixelBufferRef)pixelBuffer + colorSpace:(const agora::media::base::ColorSpace&)colorSpace; + @end namespace { + class RendererDelegate : public std::enable_shared_from_this, public agora::iris::VideoFrameObserverDelegate { public: @@ -60,6 +162,21 @@ void OnVideoFrameReceived(const void *videoFrame, return; } + // Apply color space to pixel buffer if provided + NSLog(@"OnVideoFrameReceived: frame=%dx%d, format=%u, colorSpace.validate=%d", + vf->width, vf->height, CVPixelBufferGetPixelFormatType(pixelBuffer), vf->colorSpace.validate()); + + // Store color space information, process when Flutter retrieves it + if (vf->colorSpace.validate()) { + strongRenderer.currentColorSpace = vf->colorSpace; + 
strongRenderer.hasValidColorSpace = YES; + NSLog(@"Stored color space for deferred processing: Matrix:%d, Range:%d", + vf->colorSpace.matrix, vf->colorSpace.range); + } else { + strongRenderer.hasValidColorSpace = NO; + NSLog(@"No valid color space received"); + } + if (pre_width_ != vf->width || pre_height_ != vf->height) { pre_width_ = vf->width; pre_height_ = vf->height; @@ -88,27 +205,18 @@ void OnVideoFrameReceived(const void *videoFrame, } __block CVPixelBufferRef previousPixelBuffer = nil; - // Use `dispatch_sync` to avoid unnecessary context switch under common - // non-contest scenarios; - // Under rare contest scenarios, it will not block for too long since - // the critical section is quite lightweight. - // - // Note: `dispatch_sync` will block the current thread, so we don't need - // to check if the renderer is still valid before accessing its - // properties. dispatch_sync(strongRenderer.pixelBufferSynchronizationQueue, ^{ previousPixelBuffer = strongRenderer.latestPixelBuffer; - // There has been a bug since RTC 4.4.0 that the pixel buffer ref count is not updated correctly, - // which will cause the flutter engine copy the wrong pixel buffer to the skia texture. - // So we need to copy the pixel buffer directly to work around this issue for now, after the issue is fixed, - // we can revert to the original code. 
// ---- diff residue: tail of OnVideoFrameReceived (definition starts above) ----
#if defined(TARGET_OS_OSX) && TARGET_OS_OSX
      NSLog(@"macOS: Copying CVPixelBuffer with attachments...");
      strongRenderer.latestPixelBuffer =
          [AgoraCVPixelBufferUtils copyCVPixelBuffer:pixelBuffer];
#else
      NSLog(@"iOS: Retaining original CVPixelBuffer...");
      strongRenderer.latestPixelBuffer = CVPixelBufferRetain(pixelBuffer);
#endif
    });

    if (previousPixelBuffer) {
      CVPixelBufferRelease(previousPixelBuffer);
    }

// ---- hunk boundary (@@ -166,6 +274,11 @@) — interior of -init ----
                    [[NSString stringWithFormat:@"io.agora.flutter.render_%lld",
                                                _textureId] UTF8String],
                    nil);

    // Initialize the color-space state before the first frame can arrive.
    self.hasValidColorSpace = NO;
    memset(&_currentColorSpace, 0, sizeof(_currentColorSpace));
    [self setupColorSpaceProcessing];
  }
  return self;
}

// ---- hunk boundary (@@ -202,6 +315,21 @@) — interior of -copyPixelBuffer ----
    pixelBuffer = self.latestPixelBuffer;
    self.latestPixelBuffer = nil;
  });

  // Apply the color-space transform before handing the buffer to Flutter.
  if (pixelBuffer && self.hasValidColorSpace) {
    NSLog(@"Applying Metal ColorSpace transform - Matrix:%d, Range:%d",
          self.currentColorSpace.matrix, self.currentColorSpace.range);
    [self processColorSpace:pixelBuffer colorSpace:self.currentColorSpace];
  } else if (pixelBuffer) {
    NSLog(@"Flutter requesting pixelBuffer but no valid color space");
  } else {
    NSLog(@"No pixelBuffer available for Flutter Engine");
  }

  NSLog(@"Returning pixelBuffer to Flutter Engine");

  return pixelBuffer;
}

// ---- hunk boundary (@@ -219,6 +347,247 @@) — end of -dispose ----
  }
}

#pragma mark - ColorSpace Processing

/// One-time setup of the Metal objects used for color-space conversion:
/// device, command queue, CVMetalTextureCache, the compute pipeline compiled
/// from kColorSpaceShaderSrc, and a reusable 3x3 color-matrix buffer.
/// Any failure leaves the corresponding property nil, which
/// -processColorSpace:colorSpace: treats as "processing disabled".
- (void)setupColorSpaceProcessing {
  self.metalDevice = MTLCreateSystemDefaultDevice();
  if (!self.metalDevice) {
    NSLog(@"Metal is not supported on this device");
    return;
  }

  self.commandQueue = [self.metalDevice newCommandQueue];

  // The texture cache wraps CVPixelBuffer planes as Metal textures
  // without copying.
  CVReturn result = CVMetalTextureCacheCreate(
      kCFAllocatorDefault, NULL, self.metalDevice, NULL, &_textureCache);
  if (result != kCVReturnSuccess) {
    NSLog(@"Failed to create Metal texture cache: %d", result);
    return;
  }

  // Compile the compute shader from source at runtime.
  NSError *error = nil;
  id<MTLLibrary> library = [self.metalDevice
      newLibraryWithSource:[NSString stringWithUTF8String:kColorSpaceShaderSrc]
                   options:nil
                     error:&error];
  if (!library) {
    NSLog(@"Failed to create shader library: %@", error);
    return;
  }

  id<MTLFunction> function =
      [library newFunctionWithName:@"processYUVColorSpace"];
  if (!function) {
    NSLog(@"Failed to create shader function");
    return;
  }

  self.colorSpacePipelineState =
      [self.metalDevice newComputePipelineStateWithFunction:function
                                                      error:&error];
  if (!self.colorSpacePipelineState) {
    NSLog(@"Failed to create pipeline state: %@", error);
    return;
  }

  // Reusable buffer for the 3x3 YUV->RGB matrix; refilled each frame.
  self.colorMatrixBuffer =
      [self.metalDevice newBufferWithLength:sizeof(float) * 9
                                    options:MTLResourceStorageModeShared];

  NSLog(@"Metal ColorSpace shader setup completed");
}

/// Selects the YUV->RGB conversion matrix matching the frame's signalled
/// matrix coefficients and range (mirrors GlGenericDrawer.java on Android).
/// Returns a pointer to one of the static g_color* tables defined above.
- (const float *)getColorMatrixForColorSpace:
    (const agora::media::base::ColorSpace &)colorSpace {
  const BOOL isFullRange =
      (colorSpace.range == agora::media::base::ColorSpace::RANGEID_FULL);

  switch (colorSpace.matrix) {
    case agora::media::base::ColorSpace::MATRIXID_BT709:
      NSLog(@"Using BT.709 matrix (%s range)",
            isFullRange ? "Full" : "Limited");
      return isFullRange ? g_color709_full : g_color709_limit;

    case agora::media::base::ColorSpace::MATRIXID_BT2020_NCL:
    case agora::media::base::ColorSpace::MATRIXID_BT2020_CL:
      // The Android implementation deliberately uses the full-range
      // matrix for BT.2020 regardless of the signalled range.
      NSLog(@"Using BT.2020 matrix (Full range - as per Android)");
      return g_color2020_full;

    case agora::media::base::ColorSpace::MATRIXID_SMPTE170M:
    case agora::media::base::ColorSpace::MATRIXID_BT470BG:
    default:
      NSLog(@"Using BT.601/SMPTE170M matrix (%s range)",
            isFullRange ? "Full" : "Limited");
      return isFullRange ? g_color601_full : g_color601_limit;
  }
}

/// Runs the compute shader over `pixelBuffer` (NV12 only): decodes
/// YUV->RGB with the frame's signalled color space, then re-encodes the
/// result in place via -copyProcessedRGBABackToYUVPixelBuffer:.
/// Synchronous: blocks until the GPU finishes, because the caller hands
/// the buffer to the Flutter engine immediately afterwards.
- (void)processColorSpace:(CVPixelBufferRef)pixelBuffer
               colorSpace:(const agora::media::base::ColorSpace &)colorSpace {
  if (!pixelBuffer || !self.metalDevice || !self.colorSpacePipelineState ||
      !self.commandQueue || !self.colorMatrixBuffer) {
    return;
  }

  OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
  // Only bi-planar 4:2:0 (NV12, video or full range) is supported.
  if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange &&
      pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
    NSLog(@"Unsupported pixel format for color space processing: %u",
          (unsigned)pixelFormat);
    return;
  }

  const float *matrix = [self getColorMatrixForColorSpace:colorSpace];
  memcpy([self.colorMatrixBuffer contents], matrix, sizeof(float) * 9);

  @autoreleasepool {
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    // Wrap the Y plane and the interleaved UV plane as Metal textures.
    CVMetalTextureRef yTextureRef = NULL;
    CVMetalTextureRef uvTextureRef = NULL;

    CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault, self.textureCache, pixelBuffer, NULL,
        MTLPixelFormatR8Unorm, width, height, 0, &yTextureRef);
    if (result != kCVReturnSuccess || !yTextureRef) {
      NSLog(@"Failed to create Y texture: %d", result);
      return;
    }

    result = CVMetalTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault, self.textureCache, pixelBuffer, NULL,
        MTLPixelFormatRG8Unorm, width / 2, height / 2, 1, &uvTextureRef);
    if (result != kCVReturnSuccess || !uvTextureRef) {
      NSLog(@"Failed to create UV texture: %d", result);
      CFRelease(yTextureRef);
      return;
    }

    id<MTLTexture> yTexture = CVMetalTextureGetTexture(yTextureRef);
    id<MTLTexture> uvTexture = CVMetalTextureGetTexture(uvTextureRef);

    MTLTextureDescriptor *outputDesc = [[MTLTextureDescriptor alloc] init];
    outputDesc.pixelFormat = MTLPixelFormatRGBA8Unorm;
    outputDesc.width = width;
    outputDesc.height = height;
    outputDesc.usage = MTLTextureUsageShaderWrite | MTLTextureUsageShaderRead;

    id<MTLTexture> outputTexture =
        [self.metalDevice newTextureWithDescriptor:outputDesc];
    if (!yTexture || !uvTexture || !outputTexture) {
      NSLog(@"Failed to create Metal textures for color space processing");
      CFRelease(yTextureRef);
      CFRelease(uvTextureRef);
      return;
    }

    int isFullRange =
        (colorSpace.range == agora::media::base::ColorSpace::RANGEID_FULL)
            ? 1
            : 0;

    id<MTLCommandBuffer> commandBuffer = [self.commandQueue commandBuffer];
    id<MTLComputeCommandEncoder> computeEncoder =
        [commandBuffer computeCommandEncoder];

    [computeEncoder setComputePipelineState:self.colorSpacePipelineState];
    [computeEncoder setTexture:yTexture atIndex:0];
    [computeEncoder setTexture:uvTexture atIndex:1];
    [computeEncoder setTexture:outputTexture atIndex:2];
    [computeEncoder setBuffer:self.colorMatrixBuffer offset:0 atIndex:0];
    // setBytes: avoids allocating a one-int MTLBuffer on every frame.
    [computeEncoder setBytes:&isFullRange
                      length:sizeof(isFullRange)
                     atIndex:1];

    MTLSize threadgroupSize = MTLSizeMake(16, 16, 1);
    MTLSize threadgroups = MTLSizeMake(
        (width + threadgroupSize.width - 1) / threadgroupSize.width,
        (height + threadgroupSize.height - 1) / threadgroupSize.height, 1);

    [computeEncoder dispatchThreadgroups:threadgroups
                   threadsPerThreadgroup:threadgroupSize];
    [computeEncoder endEncoding];

    [commandBuffer commit];
    // Must complete before reading the output texture on the CPU and
    // before releasing the CVMetalTextureRefs backing the input textures.
    [commandBuffer waitUntilCompleted];

    // Re-encode the corrected RGB image back into the original buffer.
    [self copyProcessedRGBABackToYUVPixelBuffer:outputTexture
                                    pixelBuffer:pixelBuffer];

    CFRelease(yTextureRef);
    CFRelease(uvTextureRef);

    NSLog(@"Applied ColorSpace transform - Matrix:%d, Range:%d (%s)",
          colorSpace.matrix, colorSpace.range,
          isFullRange ? "Full" : "Limited");
  }
}

/// Reads the RGBA output texture on the CPU and converts it back to NV12,
/// writing in place into `pixelBuffer`.
/// The forward pass decoded with the frame's own color space; here we
/// re-encode with BT.709 coefficients, scaled to the destination buffer's
/// range (a VideoRange buffer gets limited-range Y 16-235 / UV 16-240,
/// where the previous version always wrote full-range levels).
/// NOTE(review): the downstream consumer is presumed to decode this buffer
/// as BT.709 — confirm against the Flutter engine's texture path.
- (void)copyProcessedRGBABackToYUVPixelBuffer:(id<MTLTexture>)rgbaTexture
                                  pixelBuffer:(CVPixelBufferRef)pixelBuffer {
  CVReturn result = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  if (result != kCVReturnSuccess) {
    NSLog(@"Failed to lock pixel buffer: %d", result);
    return;
  }

  size_t width = CVPixelBufferGetWidth(pixelBuffer);
  size_t height = CVPixelBufferGetHeight(pixelBuffer);

  uint8_t *rgbaData = (uint8_t *)malloc(width * height * 4);
  if (!rgbaData) {  // avoid writing through NULL on allocation failure
    NSLog(@"Failed to allocate RGBA staging buffer");
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return;
  }

  [rgbaTexture getBytes:rgbaData
            bytesPerRow:width * 4
             fromRegion:MTLRegionMake2D(0, 0, width, height)
            mipmapLevel:0];

  uint8_t *yPlane =
      (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
  uint8_t *uvPlane =
      (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
  size_t yBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
  size_t uvBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
  if (!yPlane || !uvPlane) {
    NSLog(@"Failed to get YUV plane base addresses");
    free(rgbaData);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return;
  }

  // Match the destination buffer's quantization: full range writes 0-255,
  // video (limited) range writes Y in 16-235 and UV in 16-240.
  const bool fullRange = CVPixelBufferGetPixelFormatType(pixelBuffer) ==
                         kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
  const double yScale = fullRange ? 1.0 : 219.0 / 255.0;
  const double yOffset = fullRange ? 0.0 : 16.0;
  const double uvScale = fullRange ? 1.0 : 224.0 / 255.0;

  for (size_t y = 0; y < height; y++) {
    for (size_t x = 0; x < width; x++) {
      const size_t rgbaIdx = (y * width + x) * 4;
      const uint8_t r = rgbaData[rgbaIdx];
      const uint8_t g = rgbaData[rgbaIdx + 1];
      const uint8_t b = rgbaData[rgbaIdx + 2];

      // RGB -> YUV with BT.709 coefficients, then range-adjust.
      int yVal =
          (int)(yOffset + yScale * (0.2126 * r + 0.7152 * g + 0.0722 * b));
      int uVal =
          (int)(128.0 + uvScale * (-0.1146 * r - 0.3854 * g + 0.5 * b));
      int vVal =
          (int)(128.0 + uvScale * (0.5 * r - 0.4542 * g - 0.0458 * b));

      yVal = MAX(0, MIN(255, yVal));
      uVal = MAX(0, MIN(255, uVal));
      vVal = MAX(0, MIN(255, vVal));

      yPlane[y * yBytesPerRow + x] = (uint8_t)yVal;

      // 4:2:0 chroma: one interleaved UV pair per 2x2 pixel block.
      if (y % 2 == 0 && x % 2 == 0) {
        uint8_t *uv = uvPlane + (y / 2) * uvBytesPerRow + (x / 2) * 2;
        uv[0] = (uint8_t)uVal;
        uv[1] = (uint8_t)vVal;
      }
    }
  }

  free(rgbaData);
  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

  NSLog(@"Copied processed RGBA back to YUV CVPixelBuffer");
}

- (void)dealloc {
  if (self.irisRtcRendering) {
    // the delegateId is guaranteed to be auto incremented, so we can just remove
// ---- hunk boundary (@@ -234,6 +603,19 @@) — interior of -dealloc ----
      self.latestPixelBuffer = nil;
    }
  });

  // Clean up Metal resources.
  if (self.textureCache) {
    CVMetalTextureCacheFlush(self.textureCache, 0);
    CFRelease(self.textureCache);
    self.textureCache = NULL;
  }

  // Metal objects are managed by ARC; nil-ing releases them deterministically.
  self.metalDevice = nil;
  self.commandQueue = nil;
  self.colorSpacePipelineState = nil;
  self.colorMatrixBuffer = nil;
}

@end