//
// AntMediaClientProtocol.swift
// WebRTCiOSSDK
//
// Created by mekya on 8.08.2020.
// Copyright © 2020 AntMedia. All rights reserved.
//
import Foundation
import AVFoundation
import WebRTC
let COMMAND = "command"
let STREAM_ID = "streamId"
let TOKEN_ID = "token"
let VIDEO = "video"
let AUDIO = "audio"
let ROOM_ID = "room";
let NOTIFICATION = "notification";
let JOINED_ROOM_DEFINITION = "joinedTheRoom";
let DEFINITION = "definition";
let STREAMS = "streams";
let ROOM_INFORMATION_COMMAND = "roomInformation";
let GET_STREAM_INFO_COMMAND = "getStreamInfo";
let STREAM_INFORMATION_COMMAND = "streamInformation";
let FORCE_STREAM_QUALITY_INFO = "forceStreamQuality";
let STREAM_HEIGHT_FIELD = "streamHeight";
public protocol AntMediaClientProtocol {
/**
Sets the required options for the Ant Media Client to run.
- Parameters:
- url: Full Ant Media Server websocket url. You can use ws or wss. It should be something like
ws://your_server_address:5080/WebRTCAppEE/websocket?target=origin
wss://your_server_address:5443/WebRTCAppEE/websocket?target=origin
target query details:
It's not mandatory if you don't use the new Load Balancer mechanism.
It uses one of the nodes in Cluster mode.
Example parameters: "origin" or "edge"
Default value is origin.
- streamId: The stream id that you use in your connection. You either play or publish with this stream id.
- token: If you enable one-time tokens on the server side, you should enter the token value here. If one-time tokens are not enabled, just leave it empty.
- mode: The mode of the client. It should be .play, .publish or .join. If it's .play, your WebRTC client will play the stream with your streamId
on the server. If it's .publish, your WebRTC client will publish a stream with your stream id.
- enableDataChannel: Enable or disable the data channel on the mobile side. In order to make the data channel work, you also need to enable it on the server side.
- captureScreenEnabled: Captures the screen of the application. If a BroadcastExtension is used, `setExternalVideoCapture` should also be set.
*/
func setOptions(url: String, streamId: String, token: String, mode: AntMediaClientMode, enableDataChannel: Bool, captureScreenEnabled: Bool)
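// A minimal usage sketch (not part of this protocol): it assumes `AntMediaClient` is the SDK class
// conforming to this protocol and that it exposes a `delegate` property for AntMediaClientDelegate;
// the url, stream id and flag values are illustrative.
//
//     let client = AntMediaClient()
//     client.delegate = self   // assumed to conform to AntMediaClientDelegate
//     client.setOptions(url: "wss://your_server_address:5443/WebRTCAppEE/websocket?target=origin",
//                       streamId: "stream1",
//                       token: "",
//                       mode: .publish,
//                       enableDataChannel: false,
//                       captureScreenEnabled: false)
//     client.start()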
/**
Enables or disables video completely in the WebRTC client. It should be called before the `initPeerConnection()` and `start()` methods.
It's generally used to disable video in order to have audio-only streaming. If video is disabled by this method, it cannot be enabled again in the same session. Video is enabled by default.
- Parameters:
- enable: Enable or disable video in the connection.
*/
func setVideoEnable(enable: Bool)
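// Audio-only publishing sketch (assumes `client` is an object conforming to this protocol,
// already configured via setOptions): video must be disabled before initPeerConnection()/start().
//
//     client.setVideoEnable(enable: false)
//     client.start()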
/**
Sets the speaker on. It only works if the audio session has already started, so calling this method too early may have no effect.
The correct place to call it is in AntMediaClientDelegate's `audioSessionDidStartPlayOrRecord` method.
*/
func speakerOn();
/**
Sets the speaker off. It only works if the audio session has already started, so calling this method too early may have no effect.
The correct place to call it is in AntMediaClientDelegate's `audioSessionDidStartPlayOrRecord` method.
*/
func speakerOff();
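// Sketch of the recommended call site: the delegate method name comes from the comment above,
// but its exact signature is an assumption here.
//
//     func audioSessionDidStartPlayOrRecord(streamId: String) {
//         self.client.speakerOn()   // or speakerOff(), depending on the desired audio route
//     }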
/**
Initializes the peer connection and opens the camera in publish mode, but does not start streaming. It's not necessary to call this method; `start()` calls it when required. This method is generally used to open the camera first and let the user tap a button to start publishing.
*/
func initPeerConnection()
/**
Starts the streaming according to the mode of the client.
*/
func start();
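// Typical publish flow sketch (assumes `client` is already configured with setOptions):
//
//     client.initPeerConnection()   // opens the camera preview without streaming
//     // ... later, when the user taps a "Go Live" button:
//     client.start()                // starts publishing (or playing, depending on the mode)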
/**
Sets the camera position to front or back. This method is effective only if it's called before the `initPeerConnection()` and `start()` methods.
- Parameters:
- position: The camera position to open
*/
func setCameraPosition(position: AVCaptureDevice.Position);
/**
Sets the camera resolution. This method is effective only if it's called before the `initPeerConnection()` and `start()` methods.
- Parameters:
- width: Resolution width
- height: Resolution height
*/
func setTargetResolution(width: Int, height: Int);
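// Sketch: both calls must happen before initPeerConnection()/start() to take effect
// (assumes `client` conforms to this protocol; the resolution values are illustrative).
//
//     client.setCameraPosition(position: .front)
//     client.setTargetResolution(width: 1280, height: 720)
//     client.start()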
/**
Stops the connection and releases resources.
*/
func stop();
/**
Switches camera on the fly.
*/
func switchCamera()
/**
Sends data via WebRTC's Data Channel.
- Parameters:
- data: The Data to send via data channel
- binary: The type of data. It should be true if the data is binary.
*/
func sendData(data: Data, binary: Bool);
/**
Status of the data channel. Both the server and the mobile side should enable the data channel for this method to return true.
- Returns: true if data channel is active, false if it's disabled
*/
func isDataChannelActive() -> Bool;
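// Sketch of sending a text message over the data channel (assumes `client` conforms to this
// protocol and the data channel is enabled on both the server and the mobile side):
//
//     if client.isDataChannelActive(), let payload = "hello".data(using: .utf8) {
//         client.sendData(data: payload, binary: false)
//     }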
/**
The UIView element that the local camera view will be rendered to.
- Parameters:
- container: The UIView element
- mode: Scale mode of the view.
*/
func setLocalView( container: UIView, mode:UIView.ContentMode)
/**
The UIView element that the remote stream (playing stream) will be rendered to.
- Parameters:
- remoteContainer: The UIView element
- mode: Scale mode of the view.
*/
func setRemoteView(remoteContainer: UIView, mode:UIView.ContentMode)
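// Sketch of wiring the views (assumes `localView` and `remoteView` are UIView outlets
// in the hosting view controller and `client` conforms to this protocol):
//
//     client.setLocalView(container: localView, mode: .scaleAspectFit)
//     client.setRemoteView(remoteContainer: remoteView, mode: .scaleAspectFit)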
/**
- Returns: true if the websocket is connected, false if it is not.
*/
func isConnected() -> Bool;
/**
Set the debug mode. If it's true, log messages will be available.
*/
@available(*, deprecated, message: "Use static version of setDebug")
func setDebug(_ value: Bool);
/**
Set the debug mode. If it's true, log messages will be written to the console. It's disabled by default.
*/
static func setDebug(_ value: Bool);
/**
Toggles audio in the current stream. If it's muted, it will be unmuted. If it's unmuted, it'll be muted.
*/
func toggleAudio();
/**
Toggles the video stream (enable/disable) in the current stream.
*/
func toggleVideo();
/**
Stream id that this client uses.
*/
func getStreamId() -> String;
/**
Gets the stream info from the server side. The returned information includes width, height, video bitrate, audio bitrate and video codec.
If there is more than one bitrate or resolution, it provides a list of stream information.
This method triggers the streamInformation delegate method to be called. If there is no stream for the initialized WebRTCClient, streamInformation is not triggered;
instead, the server returns a "no stream exists" error through the websocket.
With the information in the streamInformation message, you can call the `forStreamQuality(resolutionHeight:)` method.
*/
func getStreamInfo();
/**
It forces a specific resolution to be played. You can get the resolution height values by calling getStreamInfo.
If the resolution is set to 0, then automatic stream quality will be used according to the measured network speed.
*/
func forStreamQuality(resolutionHeight:Int);
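// Sketch of switching quality in play mode (assumes `client` conforms to this protocol;
// the height value is illustrative and should come from the streamInformation callback):
//
//     client.getStreamInfo()
//     // ... once the streamInformation delegate callback has delivered the available resolutions:
//     client.forStreamQuality(resolutionHeight: 720)   // force 720p
//     client.forStreamQuality(resolutionHeight: 0)     // back to automatic quality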
/**
It gets the WebRTC statistics and calls the completionHandler. Below is a sample to get the audio level
in the application layer:
self.client.getStats { (statisticsReport) in
    for stat in statisticsReport.statistics {
        if stat.value.type == "track" {
            for value in stat.value.values {
                if value.key == "audioLevel" {
                    AntMediaClient.printf("audio level: \(value.value)")
                }
            }
        }
    }
}
*/
func getStats(completionHandler: @escaping (RTCStatisticsReport) -> Void);
/**
Set the max video bitrate for publishing the stream
*/
func setMaxVideoBps(videoBitratePerSecond: NSNumber);
/**
Delivers external audio to the Ant Media Client. It is likely coming from a Broadcast Extension.
*/
func deliverExternalAudio(sampleBuffer: CMSampleBuffer);
/**
Sets external audio if the audio is coming from a Broadcast Extension.
It initializes the WebRTC client accordingly.
*/
func setExternalAudio(externalAudioEnabled: Bool);
/**
Sets external video if the video is coming from a Broadcast Extension.
*/
func setExternalVideoCapture(externalVideoCapture: Bool);
/**
Delivers an external video frame.
*/
func deliverExternalVideo(sampleBuffer: CMSampleBuffer);
}
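// Screen-sharing sketch with a Broadcast Upload Extension (assumes `client` conforms to this
// protocol and the sample buffers come from RPBroadcastSampleHandler's processSampleBuffer):
//
//     client.setExternalVideoCapture(externalVideoCapture: true)
//     client.setExternalAudio(externalAudioEnabled: true)
//     client.start()
//     // for each incoming sample buffer delivered by the extension:
//     client.deliverExternalVideo(sampleBuffer: videoSampleBuffer)
//     client.deliverExternalAudio(sampleBuffer: appAudioSampleBuffer)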