diff --git a/ScreenShare/SampleHandler.swift b/ScreenShare/SampleHandler.swift
index 6f625e3..71d6a87 100644
--- a/ScreenShare/SampleHandler.swift
+++ b/ScreenShare/SampleHandler.swift
@@ -9,9 +9,14 @@
import ReplayKit
import WebRTCiOSSDK
import WebRTC
+import CoreVideo
+import CoreImage
+import CoreMedia
-class SampleHandler: RPBroadcastSampleHandler, AntMediaClientDelegate {
+class SampleHandler: RPBroadcastSampleHandler, AntMediaClientDelegate {
+ let sharedDefault = UserDefaults(suiteName: "group.io.antmedia.sbd.webrtc.sample")! // for test
+
func clientHasError(_ message: String) {
let userInfo = [NSLocalizedFailureReasonErrorKey: message]
@@ -21,6 +26,32 @@ class SampleHandler: RPBroadcastSampleHandler, AntMediaClientDelegate {
func publishStarted(streamId: String) {
NSLog("Publish has started");
+// self.client.setFrameCapturer { [weak self] buffer, frame in
+// guard let self, let requiredFrame = self.sharedDefault.object(forKey: "screenShareFrame") as? [CGFloat] else {
+// return frame
+// }
+//
+// // topSafeArea iPhone 12 mini
+// let topSafeArea: CGFloat = 44
+// let convertedFrame = convertRectToBufferFrame(
+// rect: .init(x: requiredFrame[0], y: requiredFrame[1] + topSafeArea, width: requiredFrame[2], height: requiredFrame[3]),
+// screenSize: UIScreen.main.bounds.size,
+// bufferSize: frame.size
+// )
+// return convertedFrame
+// }
+ }
+
+ func convertRectToBufferFrame(rect: CGRect, screenSize: CGSize, bufferSize: CGSize) -> CGRect {
+ let scaleX = bufferSize.width / screenSize.width
+ let scaleY = bufferSize.height / screenSize.height
+
+ let projectedX = rect.origin.x * scaleX
+ let projectedY = rect.origin.y * scaleY
+ let projectedWidth = rect.size.width * scaleX
+ let projectedHeight = rect.size.height * scaleY
+
+ return CGRect(x: projectedX, y: projectedY, width: projectedWidth, height: projectedHeight)
}
func publishFinished(streamId: String) {
@@ -40,15 +71,14 @@ class SampleHandler: RPBroadcastSampleHandler, AntMediaClientDelegate {
override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
// User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.
- let sharedDefault = UserDefaults(suiteName: "group.io.antmedia.ios.webrtc.sample")!
+// original: group.io.antmedia.ios.webrtc.sample
streamId = sharedDefault.object(forKey: "streamId") as! String;
let url = sharedDefault.object(forKey: "url");
- let token = sharedDefault.object(forKey: "token");
+// let token = sharedDefault.object(forKey: "token") ;
let videoEnabledObject = sharedDefault.object(forKey:"videoEnabled") as! String;
- if videoEnabledObject == "false"
- {
+ if videoEnabledObject == "false" {
videoEnabled = false;
}
@@ -57,9 +87,10 @@ class SampleHandler: RPBroadcastSampleHandler, AntMediaClientDelegate {
audioEnabled = false;
}
- if ((streamId) == nil)
- {
- let userInfo = [NSLocalizedFailureReasonErrorKey: "StreamId is not specified. Please specify stream id in the container app"]
+ if ((streamId).isEmpty) {
+ let userInfo = [
+ NSLocalizedFailureReasonErrorKey: "StreamId is not specified. Please specify stream id in the container app"
+ ]
finishBroadcastWithError(NSError(domain: "ScreenShare", code: -1, userInfo: userInfo));
}
@@ -69,36 +100,34 @@ class SampleHandler: RPBroadcastSampleHandler, AntMediaClientDelegate {
finishBroadcastWithError(NSError(domain: "ScreenShare", code: -2, userInfo: userInfo));
}
else {
- NSLog("----> streamId: %@ , websocket url: %@, videoEnabled: %d , audioEnabled: %d", streamId as! String, url as! String,
+ NSLog("----> streamId: %@ , websocket url: %@, videoEnabled: %d , audioEnabled: %d", streamId, url as! String,
videoEnabled, audioEnabled);
self.client.delegate = self
self.client.setDebug(true)
self.client.setUseExternalCameraSource(useExternalCameraSource: true)
self.client.setWebSocketServerUrl(url: url as! String)
-
- if (videoEnabled != nil) {
- self.client.setVideoEnable(enable: videoEnabled as! Bool);
+
+ if (videoEnabled != false) {
+ self.client.setVideoEnable(enable: videoEnabled);
self.client.setExternalVideoCapture(externalVideoCapture: true);
}
//in some ipad versions, resolution/aspect ratio is critical to set, otherwise iOS encoder may not encode the frames and
//server side reports publishTimeout because server cannot get the video frames
- self.client.setTargetResolution(width: 1280, height: 720);
- self.client.setMaxVideoBps(videoBitratePerSecond: 2000000)
-
+
+ self.client.setTargetResolution(width: 1920, height: 1080);
+ self.client.setMaxVideoBps(videoBitratePerSecond: 5000000)
self.client.setExternalAudio(externalAudioEnabled: true)
//In some devices iphone version, frames are dropped due to encoder queue and it causes glitches in the playback
//Decreasing the fps provides a better playback expeience.
//Alternatively, target resolution can be decreased above to let encoder work faster
- self.client.setTargetFps(fps: 10)
+ self.client.setTargetFps(fps: 30)
- self.client.publish(streamId: streamId as! String);
+ self.client.publish(streamId: streamId);
}
-
-
}
override func broadcastPaused() {
@@ -119,7 +148,7 @@ class SampleHandler: RPBroadcastSampleHandler, AntMediaClientDelegate {
// Handle video sample buffer
//NSLog("processSamplebuffer video");
if videoEnabled {
- self.client.deliverExternalVideo(sampleBuffer: sampleBuffer);
+ self.client.deliverExternalVideo(sampleBuffer: sampleBuffer)
}
break
case RPSampleBufferType.audioApp:
diff --git a/ScreenShare/ScreenShare.entitlements b/ScreenShare/ScreenShare.entitlements
index 2099144..3e03c56 100644
--- a/ScreenShare/ScreenShare.entitlements
+++ b/ScreenShare/ScreenShare.entitlements
@@ -4,7 +4,8 @@
com.apple.security.application-groups
- group.io.antmedia.ios.webrtc.sample
+ group.io.antmedia.sbd.webrtc.sample
+ group.io.sbd.chatdemo
diff --git a/WebRTC-Sample-App/Base.lproj/Main.storyboard b/WebRTC-Sample-App/Base.lproj/Main.storyboard
index 9796ef7..f19da1f 100644
--- a/WebRTC-Sample-App/Base.lproj/Main.storyboard
+++ b/WebRTC-Sample-App/Base.lproj/Main.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -19,7 +19,7 @@
-
+
@@ -27,17 +27,17 @@
-
+
-
+
@@ -59,14 +59,14 @@ WebRTC Live Streaming
-
+
-
+
+
-
+
@@ -141,9 +142,9 @@ WebRTC Live Streaming
-
+
-
+
diff --git a/WebRTC-Sample-App/ShareScreenController.swift b/WebRTC-Sample-App/ShareScreenController.swift
new file mode 100644
index 0000000..a0f21d2
--- /dev/null
+++ b/WebRTC-Sample-App/ShareScreenController.swift
@@ -0,0 +1,96 @@
+//
+// ShareScreenController.swift
+// WebRTC-Sample-App
+//
+// Created by Muhammadjon Tohirov on 08/02/25.
+//
+
+import Foundation
+import UIKit
+import WebRTCiOSSDK
+import ReplayKit
+
+final class ShareScreenController: UIViewController {
+ var broadcastPicker: RPSystemBroadcastPickerView!
+ private let draggableRectangle = UIView()
+ private var lastLocation = CGPoint.zero
+ private let userDefaults: UserDefaults = UserDefaults(suiteName: "group.io.antmedia.sbd.webrtc.sample")!
+ private let imageView: UIImageView = .init()
+ override func viewDidLoad() {
+ super.viewDidLoad()
+ self.setupSubviews()
+ self.setupShareScreen()
+ self.setupDraggableRectangle()
+ }
+
+ override func viewDidLayoutSubviews() {
+ super.viewDidLayoutSubviews()
+ broadcastPicker.frame = .init(
+ x: (view.bounds.width - broadcastPicker.bounds.width) / 2,
+ y: view.bounds.height - broadcastPicker.bounds.height - 20,
+ width: broadcastPicker.bounds.width,
+ height: broadcastPicker.bounds.height
+ )
+
+ imageView.frame = view.bounds
+ }
+
+ private func setupShareScreen() {
+ let ssBundle = "antmedia.sbd.sample.screen"
+ broadcastPicker.preferredExtension = ssBundle;
+ }
+
+ private func setupSubviews() {
+ broadcastPicker = RPSystemBroadcastPickerView(frame: .init(x: 0, y: 0, width: 50, height: 50))
+ view.addSubview(imageView)
+ view.addSubview(broadcastPicker)
+ view.addSubview(draggableRectangle)
+ view.backgroundColor = .secondarySystemBackground
+
+ imageView.image = UIImage(named: "logo")
+ imageView.contentMode = .scaleAspectFit
+ }
+
+ private func setupDraggableRectangle() {
+ draggableRectangle.frame = CGRect(x: 50, y: 100, width: 200, height: 200)
+ draggableRectangle.layer.borderColor = UIColor.black.cgColor
+ draggableRectangle.layer.borderWidth = 2
+ draggableRectangle.backgroundColor = UIColor.clear
+
+ let panGesture = UIPanGestureRecognizer(target: self, action: #selector(handlePanGesture(_:)))
+ draggableRectangle.addGestureRecognizer(panGesture)
+ }
+
+ @objc private func handlePanGesture(_ gesture: UIPanGestureRecognizer) {
+ let translation = gesture.translation(in: self.view)
+
+ if let view = gesture.view {
+ let newX = view.center.x + translation.x
+ let newY = view.center.y + translation.y
+
+ // Ensure it stays within bounds
+ let minX = view.safeAreaInsets.left + view.frame.width / 2
+ let maxX = self.view.frame.width - view.safeAreaInsets.right - view.frame.width / 2
+ let minY = self.view.safeAreaInsets.top + view.frame.height / 2
+ let maxY = broadcastPicker.frame.minY - view.frame.height / 2
+
+ let clampedX = max(minX, min(newX, maxX))
+ let clampedY = max(minY, min(newY, maxY))
+
+ view.center = CGPoint(x: clampedX, y: clampedY)
+ gesture.setTranslation(.zero, in: self.view)
+
+ if gesture.state == .ended {
+ print("Final Rectangle Frame: \(view.frame)")
+ publishFrame(view.frame)
+ }
+ }
+ }
+
+ func publishFrame(_ frame: CGRect) {
+ let value: [CGFloat] = [
+ frame.minX, frame.minY, frame.width, frame.height
+ ]
+ userDefaults.set(value, forKey: "screenShareFrame")
+ }
+}
diff --git a/WebRTC-Sample-App/VideoViewController.swift b/WebRTC-Sample-App/VideoViewController.swift
index 30859b5..b7d0b8f 100644
--- a/WebRTC-Sample-App/VideoViewController.swift
+++ b/WebRTC-Sample-App/VideoViewController.swift
@@ -260,14 +260,17 @@ extension VideoViewController: AntMediaClientDelegate {
print("Local stream added")
self.fullVideoView.isHidden = false
- var localVideoTrack:RTCVideoTrack? = self.client?.getLocalVideoTrack();
+ let localVideoTrack:RTCVideoTrack? = self.client?.getLocalVideoTrack();
- print("local video trackId:\(localVideoTrack?.trackId)");
+ print("local video trackId:\(localVideoTrack?.trackId ?? "--")");
- var localAudioTrack:RTCAudioTrack? = self.client?.getLocalAudioTrack();
-
- print("local audio trackId:\(localAudioTrack?.trackId)");
+ let localAudioTrack:RTCAudioTrack? = self.client?.getLocalAudioTrack();
+ print("local audio trackId:\(localAudioTrack?.trackId ?? "--")");
+
+ DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
+ self.client?.setZoomLevel(zoomFactor: 2)
+ }
}
diff --git a/WebRTC-Sample-App/WebRTC-Sample-App.entitlements b/WebRTC-Sample-App/WebRTC-Sample-App.entitlements
index 2099144..3e03c56 100644
--- a/WebRTC-Sample-App/WebRTC-Sample-App.entitlements
+++ b/WebRTC-Sample-App/WebRTC-Sample-App.entitlements
@@ -4,7 +4,8 @@
com.apple.security.application-groups
- group.io.antmedia.ios.webrtc.sample
+ group.io.antmedia.sbd.webrtc.sample
+ group.io.sbd.chatdemo
diff --git a/WebRTC-Sample-App/WebRTC-Sample-AppDebug.entitlements b/WebRTC-Sample-App/WebRTC-Sample-AppDebug.entitlements
index 2099144..3e03c56 100644
--- a/WebRTC-Sample-App/WebRTC-Sample-AppDebug.entitlements
+++ b/WebRTC-Sample-App/WebRTC-Sample-AppDebug.entitlements
@@ -4,7 +4,8 @@
com.apple.security.application-groups
- group.io.antmedia.ios.webrtc.sample
+ group.io.antmedia.sbd.webrtc.sample
+ group.io.sbd.chatdemo
diff --git a/WebRTC-Sample-App/WelcomeViewController.swift b/WebRTC-Sample-App/WelcomeViewController.swift
index b7014f8..a26f92e 100644
--- a/WebRTC-Sample-App/WelcomeViewController.swift
+++ b/WebRTC-Sample-App/WelcomeViewController.swift
@@ -39,7 +39,8 @@ class WelcomeViewController: UIViewController {
var clientToken: String!
var isConnected = false
var tapGesture: UITapGestureRecognizer!
- let sharedDefault = UserDefaults(suiteName: "group.io.antmedia.ios.webrtc.sample")!
+ // original: group.io.antmedia.ios.webrtc.sample
+ let sharedDefault = UserDefaults(suiteName: "group.io.antmedia.sbd.webrtc.sample")! // for test
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
@@ -143,6 +144,11 @@ class WelcomeViewController: UIViewController {
private func showVideo()
{
+ if self.modeSelection.selectedSegmentIndex == 3 {
+ self.show(ShareScreenController(), sender: nil)
+ return
+ }
+
if self.getMode() != AntMediaClientMode.conference {
let controller = UIStoryboard.init(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "Video") as! VideoViewController
controller.clientUrl = self.clientUrl
@@ -161,4 +167,3 @@ class WelcomeViewController: UIViewController {
}
}
-
diff --git a/WebRTCiOSSDK.xcodeproj/project.pbxproj b/WebRTCiOSSDK.xcodeproj/project.pbxproj
index 66fe6f5..ae46b73 100644
--- a/WebRTCiOSSDK.xcodeproj/project.pbxproj
+++ b/WebRTCiOSSDK.xcodeproj/project.pbxproj
@@ -7,6 +7,7 @@
objects = {
/* Begin PBXBuildFile section */
+ 52E687E92D57D0E3001A459E /* ShareScreenController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 52E687E82D57D0E3001A459E /* ShareScreenController.swift */; };
A8B965F62A06969900D67CA1 /* ReplayKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A8B965F52A06969900D67CA1 /* ReplayKit.framework */; };
A8B965F92A06969900D67CA1 /* SampleHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8B965F82A06969900D67CA1 /* SampleHandler.swift */; };
A8B965FD2A06969A00D67CA1 /* ScreenShare.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = A8B965F42A06969900D67CA1 /* ScreenShare.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
@@ -151,6 +152,7 @@
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
+ 52E687E82D57D0E3001A459E /* ShareScreenController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShareScreenController.swift; sourceTree = ""; };
A8522FE02B832967007BC5A8 /* WebRTC-Sample-AppDebug.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "WebRTC-Sample-AppDebug.entitlements"; sourceTree = ""; };
A8B965F42A06969900D67CA1 /* ScreenShare.appex */ = {isa = PBXFileReference; explicitFileType = "wrapper.app-extension"; includeInIndex = 0; path = ScreenShare.appex; sourceTree = BUILT_PRODUCTS_DIR; };
A8B965F52A06969900D67CA1 /* ReplayKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ReplayKit.framework; path = System/Library/Frameworks/ReplayKit.framework; sourceTree = SDKROOT; };
@@ -353,6 +355,7 @@
A8DAC0382A063BC00007CDE7 /* Util */,
A8DAC0362A063BC00007CDE7 /* VideoViewController.swift */,
A8DAC0372A063BC00007CDE7 /* WelcomeViewController.swift */,
+ 52E687E82D57D0E3001A459E /* ShareScreenController.swift */,
A8DAC0052A063AB30007CDE7 /* AppDelegate.swift */,
A8DAC0072A063AB30007CDE7 /* SceneDelegate.swift */,
A8DAC00B2A063AB30007CDE7 /* Main.storyboard */,
@@ -692,6 +695,7 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
+ 52E687E92D57D0E3001A459E /* ShareScreenController.swift in Sources */,
A8DAC0462A063BC00007CDE7 /* SwiftyUserDefaults.swift in Sources */,
A8DAC0452A063BC00007CDE7 /* Defaults.swift in Sources */,
A8DAC0432A063BC00007CDE7 /* UIApplication.swift in Sources */,
@@ -785,7 +789,7 @@
CODE_SIGN_ENTITLEMENTS = ScreenShare/ScreenShare.entitlements;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = MJU7KX4L7S;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = ScreenShare/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = ScreenShare;
@@ -797,7 +801,7 @@
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample.screen.share;
+ PRODUCT_BUNDLE_IDENTIFIER = antmedia.sbd.sample.screen;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_EMIT_LOC_STRINGS = YES;
@@ -812,7 +816,7 @@
CODE_SIGN_ENTITLEMENTS = ScreenShare/ScreenShare.entitlements;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = MJU7KX4L7S;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = ScreenShare/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = ScreenShare;
@@ -824,7 +828,7 @@
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample.screen.share;
+ PRODUCT_BUNDLE_IDENTIFIER = antmedia.sbd.sample.screen;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_EMIT_LOC_STRINGS = YES;
@@ -962,7 +966,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEFINES_MODULE = YES;
- DEVELOPMENT_TEAM = 2YK9J8G25K;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
@@ -997,7 +1001,7 @@
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEFINES_MODULE = YES;
- DEVELOPMENT_TEAM = 2YK9J8G25K;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
@@ -1031,7 +1035,7 @@
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = 2YK9J8G25K;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MARKETING_VERSION = 1.0;
@@ -1049,7 +1053,7 @@
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = 2YK9J8G25K;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MARKETING_VERSION = 1.0;
@@ -1071,7 +1075,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = MJU7KX4L7S;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = "WebRTC-Sample-App/Info.plist";
INFOPLIST_KEY_CFBundleDisplayName = "WebRTC Sample";
@@ -1088,7 +1092,7 @@
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample;
+ PRODUCT_BUNDLE_IDENTIFIER = antmedia.sbd.sample;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
@@ -1110,7 +1114,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = MJU7KX4L7S;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = "WebRTC-Sample-App/Info.plist";
INFOPLIST_KEY_CFBundleDisplayName = "WebRTC Sample";
@@ -1127,7 +1131,7 @@
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample;
+ PRODUCT_BUNDLE_IDENTIFIER = antmedia.sbd.sample;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
@@ -1146,11 +1150,11 @@
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = MJU7KX4L7S;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample.AppTests;
+ PRODUCT_BUNDLE_IDENTIFIER = antmedia.sbd.webrtc.sample;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
@@ -1166,11 +1170,11 @@
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = MJU7KX4L7S;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample.AppTests;
+ PRODUCT_BUNDLE_IDENTIFIER = antmedia.sbd.webrtc.sample;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
@@ -1185,11 +1189,11 @@
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = MJU7KX4L7S;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample.AppUITests;
+ PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample.AppUITests;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
@@ -1204,11 +1208,11 @@
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
- DEVELOPMENT_TEAM = MJU7KX4L7S;
+ DEVELOPMENT_TEAM = 84D2TM7KGZ;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample.AppUITests;
+ PRODUCT_BUNDLE_IDENTIFIER = io.antmedia.ios.webrtc.sample.AppUITests;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
diff --git a/WebRTCiOSSDK/api/AntMediaClient.swift b/WebRTCiOSSDK/api/AntMediaClient.swift
index f05171f..782d620 100644
--- a/WebRTCiOSSDK/api/AntMediaClient.swift
+++ b/WebRTCiOSSDK/api/AntMediaClient.swift
@@ -415,6 +415,41 @@ open class AntMediaClient: NSObject, AntMediaClientProtocol {
}
}
+ public func setZoomLevel(zoomFactor: CGFloat) {
+ guard let streamId = publisherStreamId, let camera = webRTCClientMap[streamId]?.captureDevice else { return }
+
+ do {
+ try camera.lockForConfiguration()
+ camera.videoZoomFactor = max(1.0, min(zoomFactor, camera.activeFormat.videoMaxZoomFactor)) // Keep within limits
+ camera.unlockForConfiguration()
+ } catch {
+ print("Failed to set zoom level: \(error)")
+ }
+ }
+
+ public func smoothZoom(to zoomFactor: CGFloat, rate: Float) {
+ guard let streamId = publisherStreamId, let camera = webRTCClientMap[streamId]?.captureDevice else { return }
+
+ do {
+ try camera.lockForConfiguration()
+ camera.ramp(toVideoZoomFactor: max(1.0, min(zoomFactor, camera.activeFormat.videoMaxZoomFactor)), withRate: rate)
+ camera.unlockForConfiguration()
+ } catch {
+ print("Failed to ramp zoom: \(error)")
+ }
+ }
+
+ public func stopZoomRamp() {
+ guard let streamId = publisherStreamId, let camera = webRTCClientMap[streamId]?.captureDevice else { return }
+
+ do {
+ try camera.lockForConfiguration()
+ camera.cancelVideoZoomRamp()
+ camera.unlockForConfiguration()
+ } catch {
+ print("Failed to cancel zoom ramp: \(error)")
+ }
+ }
/*
Connect to websocket.
@@ -1216,10 +1251,8 @@ open class AntMediaClient: NSObject, AntMediaClientProtocol {
(self.webRTCClientMap[self.getPublisherStreamId()]?.getVideoCapturer() as? RTCCustomFrameCapturer)?.capture(pixelBuffer, rotation: rotation, timeStampNs: timestampNs);
}
-
public func enableVideoTrack(trackId:String, enabled:Bool){
if (isWebSocketConnected) {
-
let jsonString = [
COMMAND: ENABLE_VIDEO_TRACK_COMMAND,
TRACK_ID: trackId,
@@ -1625,6 +1658,15 @@ extension AntMediaClient {
)
}
}
-
-
+}
+
+extension AntMediaClient {
+ /// Use this method when streaming is started.
+ public func setFrameCapturer(_ capturer: @escaping (_ buffer: CVPixelBuffer, _ frame: CGRect) -> CGRect?) {
+ guard let streamId = publisherStreamId else {
+ return
+ }
+
+ (webRTCClientMap[streamId]?.videoCapturer as? RTCCustomFrameCapturer)?.frameCapturer = capturer
+ }
}
diff --git a/WebRTCiOSSDK/api/AntMediaClientProtocol.swift b/WebRTCiOSSDK/api/AntMediaClientProtocol.swift
index 866c7a5..9d11d0d 100644
--- a/WebRTCiOSSDK/api/AntMediaClientProtocol.swift
+++ b/WebRTCiOSSDK/api/AntMediaClientProtocol.swift
@@ -162,6 +162,25 @@ public protocol AntMediaClientProtocol {
*/
func switchCamera()
+ /**
+ Instant zoom
+ 1.0 means no zoom, 2.0 means 2x zoom, and so on.
+ The method ensures the zoom does not exceed the camera’s limits.
+ */
+ func setZoomLevel(zoomFactor: CGFloat)
+
+ /**
+ Smooth zoom
+ The rate controls how fast the zoom happens.
+ Lower values (e.g., 1.0) mean slow zoom; higher values (e.g., 5.0) mean faster zoom.
+ */
+ func smoothZoom(to zoomFactor: CGFloat, rate: Float)
+
+ /**
+ If a zoom ramp is in progress, you can cancel it immediately:
+ */
+ func stopZoomRamp()
+
/**
Sends data via WebRTC's Data Channel.
- Parameters:
diff --git a/WebRTCiOSSDK/api/webrtc/RTCCustomFrameCapturer.swift b/WebRTCiOSSDK/api/webrtc/RTCCustomFrameCapturer.swift
index 82b9d31..c4ad87a 100644
--- a/WebRTCiOSSDK/api/webrtc/RTCCustomFrameCapturer.swift
+++ b/WebRTCiOSSDK/api/webrtc/RTCCustomFrameCapturer.swift
@@ -16,7 +16,7 @@ class RTCCustomFrameCapturer: RTCVideoCapturer {
var nanoseconds: Float64 = 0
var lastSentFrameTimeStampNanoSeconds: Int64 = 0;
private var targetHeight: Int
-
+
private var videoEnabled: Bool = true;
private var audioEnabled: Bool = true;
@@ -24,13 +24,13 @@ class RTCCustomFrameCapturer: RTCVideoCapturer {
private var frameRateIntervalNanoSeconds : Float64 = 0;
-
// if externalCapture is true, it means that capture method is called from an external component.
// externalComponent is the BroadcastExtension
private var externalCapture: Bool;
private var fps: Int;
+ var frameCapturer: ((_ buffer: CVPixelBuffer, _ frame: CGRect) -> CGRect?)?
init(delegate: RTCVideoCapturerDelegate, height: Int, externalCapture: Bool = false, videoEnabled: Bool = true, audioEnabled: Bool = false, fps: Int = 30)
{
@@ -42,48 +42,66 @@ class RTCCustomFrameCapturer: RTCVideoCapturer {
self.audioEnabled = audioEnabled;
self.frameRateIntervalNanoSeconds = kNanosecondsPerSecond/Double(fps);
self.fps = fps;
-
- super.init(delegate: delegate)
+ super.init(delegate: delegate)
}
public func setWebRTCClient(webRTCClient: WebRTCClient) {
self.webRTCClient = webRTCClient
}
- public func capture(_ pixelBuffer: CVPixelBuffer, rotation:RTCVideoRotation, timeStampNs: Int64 )
- {
- if ((Double(timeStampNs) - Double(lastSentFrameTimeStampNanoSeconds)) < frameRateIntervalNanoSeconds ) {
- AntMediaClient.verbose("Dropping frame because high fps than the configured fps: \(fps). Incoming timestampNs:\(timeStampNs) last sent timestampNs:\(lastSentFrameTimeStampNanoSeconds) frameRateIntervalNs:\(frameRateIntervalNanoSeconds)");
- return;
-
- }
-
+ private func getCropRect(pixelBuffer: CVPixelBuffer) -> (cropX: Int32, cropY: Int32, cropWidth: Int32, cropHeight: Int32) {
let width = Int32(CVPixelBufferGetWidth(pixelBuffer))
let height = Int32(CVPixelBufferGetHeight(pixelBuffer))
- var scaledWidth = (width * Int32(self.targetHeight)) / height;
- if (scaledWidth % 2 == 1) {
- scaledWidth+=1;
+ let cropRect = self.frameCapturer?(pixelBuffer, .init(x: 0, y: 0, width: width.asCGFLoat, height: height.asCGFLoat)) ?? CGRect(x: 0, y: 0, width: width.asCGFLoat, height: height.asCGFLoat)
+
+ return (cropRect.minX.asInt32, cropRect.minY.asInt32, min(cropRect.width.asInt32, width), min(cropRect.height.asInt32, height))
+ }
+
+ private func calculateAdaptedSize(cropWidth: Int32, cropHeight: Int32) -> (adaptedWidth: Int32, adaptedHeight: Int32) {
+ let maxHeight: Int32 = targetHeight.asInt32
+ var adaptedHeight = min(cropHeight, maxHeight)
+ var adaptedWidth = (cropWidth * adaptedHeight) / cropHeight
+ adaptedWidth = min(adaptedWidth, cropWidth)
+
+ if adaptedWidth % 2 != 0 { adaptedWidth += 1 }
+ if adaptedHeight % 2 != 0 { adaptedHeight += 1 }
+
+ return (adaptedWidth, adaptedHeight)
+ }
+
+ private func shouldDropFrame(timeStampNs: Int64) -> Bool {
+ return (Double(timeStampNs) - Double(lastSentFrameTimeStampNanoSeconds)) < frameRateIntervalNanoSeconds
+ }
+
+ public func capture(_ pixelBuffer: CVPixelBuffer, rotation: RTCVideoRotation, timeStampNs: Int64) {
+ if shouldDropFrame(timeStampNs: timeStampNs) {
+ AntMediaClient.verbose("Dropping frame due to high FPS: \(fps). Incoming timestamp: \(timeStampNs)")
+ return
}
+ let (cropX, cropY, cropWidth, cropHeight) = getCropRect(pixelBuffer: pixelBuffer)
+ let (adaptedWidth, adaptedHeight) = calculateAdaptedSize(cropWidth: cropWidth, cropHeight: cropHeight)
+
let rtcPixelBuffer = RTCCVPixelBuffer(
pixelBuffer: pixelBuffer,
- adaptedWidth:scaledWidth,
- adaptedHeight: Int32(self.targetHeight),
- cropWidth: width,
- cropHeight: height,
- cropX: 0,
- cropY: 0)
+ adaptedWidth: adaptedWidth,
+ adaptedHeight: adaptedHeight,
+ cropWidth: cropWidth,
+ cropHeight: cropHeight,
+ cropX: cropX,
+ cropY: cropY
+ )
let rtcVideoFrame = RTCVideoFrame(
- buffer: rtcPixelBuffer,
- rotation: rotation,
- timeStampNs: Int64(timeStampNs)
- )
+ buffer: rtcPixelBuffer,
+ rotation: rotation,
+ timeStampNs: Int64(timeStampNs)
+ )
self.delegate?.capturer(self, didCapture: rtcVideoFrame.newI420())
- lastSentFrameTimeStampNanoSeconds = Int64(timeStampNs);
+ lastSentFrameTimeStampNanoSeconds = Int64(timeStampNs)
}
public func capture(_ sampleBuffer: CMSampleBuffer, externalRotation:Int = -1) {
@@ -96,8 +114,8 @@ class RTCCustomFrameCapturer: RTCVideoCapturer {
}
let timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
- kNanosecondsPerSecond;
-
+ kNanosecondsPerSecond;
+
if ((Double(timeStampNs) - Double(lastSentFrameTimeStampNanoSeconds)) < frameRateIntervalNanoSeconds ) {
AntMediaClient.verbose("Dropping frame because high fps than the configured fps: \(fps). Incoming timestampNs:\(timeStampNs) last sent timestampNs:\(lastSentFrameTimeStampNanoSeconds) frameRateIntervalNs:\(frameRateIntervalNanoSeconds)");
return;
@@ -146,7 +164,7 @@ class RTCCustomFrameCapturer: RTCVideoCapturer {
else {
rotation = RTCVideoRotation(rawValue:externalRotation) ?? RTCVideoRotation._0;
}
-
+
capture(pixelBuffer, rotation: rotation, timeStampNs: Int64(timeStampNs))
//NSLog("Device orientation width: %d, height:%d ", width, height);
@@ -157,12 +175,10 @@ class RTCCustomFrameCapturer: RTCVideoCapturer {
}
- public func startCapture()
- {
- if !externalCapture
- {
+ public func startCapture() {
+ if !externalCapture {
let recorder = RPScreenRecorder.shared();
-
+
if #available(iOS 11.0, *) {
recorder.startCapture { (buffer, bufferType, error) in
if bufferType == RPSampleBufferType.video && self.videoEnabled
@@ -185,25 +201,39 @@ class RTCCustomFrameCapturer: RTCVideoCapturer {
}
}
- public func stopCapture()
- {
+ public func stopCapture() {
if !externalCapture {
let recorder = RPScreenRecorder.shared();
if (recorder.isRecording) {
- if #available(iOS 11.0, *) {
- recorder.stopCapture { (error) in
- guard error == nil else {
- AntMediaClient.printf("Cannot stop capture \(String(describing: error))");
- return;
- }
- }
- } else {
-
- }
- }
+ if #available(iOS 11.0, *) {
+ recorder.stopCapture { (error) in
+ guard error == nil else {
+ AntMediaClient.printf("Cannot stop capture \(String(describing: error))");
+ return;
+ }
+ }
+ } else {
+
+ }
+ }
}
}
-
-
}
+extension CGFloat {
+ var asInt32: Int32 {
+ return Int32(self)
+ }
+}
+
+extension Int32 {
+ var asCGFLoat: CGFloat {
+ return CGFloat(self)
+ }
+}
+
+extension Int {
+ var asInt32: Int32 {
+ return Int32(self)
+ }
+}
diff --git a/WebRTCiOSSDK/api/webrtc/WebRTCClient.swift b/WebRTCiOSSDK/api/webrtc/WebRTCClient.swift
index 83a21d2..6e1365f 100644
--- a/WebRTCiOSSDK/api/webrtc/WebRTCClient.swift
+++ b/WebRTCiOSSDK/api/webrtc/WebRTCClient.swift
@@ -23,7 +23,7 @@ class WebRTCClient: NSObject {
var delegate: WebRTCClientDelegate?
var peerConnection : RTCPeerConnection?
- private var videoCapturer: RTCVideoCapturer?
+ private(set) var videoCapturer: RTCVideoCapturer?
var localVideoTrack: RTCVideoTrack!
var localAudioTrack: RTCAudioTrack!
var remoteVideoTrack: RTCVideoTrack!
@@ -56,6 +56,12 @@ class WebRTCClient: NSObject {
private var externalAudio: Bool = false;
private var cameraSourceFPS: Int = 30;
+
+ // this is not an ideal method to get current capture device, we need more legit solution
+ var captureDevice: AVCaptureDevice? {
+ (RTCCameraVideoCapturer.captureDevices().first { $0.position == self.cameraPosition })
+ }
+
/*
State of the connection
*/
@@ -329,13 +335,10 @@ class WebRTCClient: NSObject {
return iceConnectionState;
}
-
+ @discardableResult
private func startCapture() -> Bool {
-
- let camera = (RTCCameraVideoCapturer.captureDevices().first { $0.position == self.cameraPosition })
-
- if (camera != nil) {
- let supportedFormats = RTCCameraVideoCapturer.supportedFormats(for: camera!)
+ if (captureDevice != nil) {
+ let supportedFormats = RTCCameraVideoCapturer.supportedFormats(for: captureDevice!)
var currentDiff = INT_MAX
var selectedFormat: AVCaptureDevice.Format? = nil
for supportedFormat in supportedFormats {
@@ -348,7 +351,6 @@ class WebRTCClient: NSObject {
}
if (selectedFormat != nil) {
-
var maxSupportedFramerate: Float64 = 0;
for fpsRange in selectedFormat!.videoSupportedFrameRateRanges {
maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
@@ -362,7 +364,7 @@ class WebRTCClient: NSObject {
let cameraVideoCapturer = self.videoCapturer as? RTCCameraVideoCapturer;
- cameraVideoCapturer?.startCapture(with: camera!,
+ cameraVideoCapturer?.startCapture(with: captureDevice!,
format: selectedFormat!,
fps: Int(fps))
@@ -420,7 +422,7 @@ class WebRTCClient: NSObject {
self.videoSender = self.peerConnection?.add(self.localVideoTrack, streamIds: [LOCAL_MEDIA_STREAM_ID])
- if let params = videoSender?.parameters
+ if let params = videoSender?.parameters
{
params.degradationPreference = (self.degradationPreference.rawValue) as NSNumber
videoSender?.parameters = params
@@ -576,10 +578,10 @@ extension WebRTCClient: RTCPeerConnectionDelegate {
func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
let candidateJson = ["command": "takeCandidate",
"type" : "candidate",
- "streamId": self.streamId,
+ "streamId": self.streamId ?? "",
"candidate" : candidate.sdp,
"label": candidate.sdpMLineIndex,
- "id": candidate.sdpMid] as [String : Any]
+ "id": candidate.sdpMid ?? ""] as [String : Any]
self.delegate?.sendMessage(candidateJson)
}