VideoStream.java
/*
* Copyright (C) 2011-2014 GUIGUI Simon, [email protected]
*
* This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
*
* Spydroid is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This source code is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this source code; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.majorkernelpanic.streaming.video;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import net.majorkernelpanic.streaming.MediaStream;
import net.majorkernelpanic.streaming.Stream;
import net.majorkernelpanic.streaming.exceptions.CameraInUseException;
import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException;
import net.majorkernelpanic.streaming.exceptions.InvalidSurfaceException;
import net.majorkernelpanic.streaming.gl.SurfaceView;
import net.majorkernelpanic.streaming.hw.EncoderDebugger;
import net.majorkernelpanic.streaming.hw.NV21Convertor;
import net.majorkernelpanic.streaming.rtp.MediaCodecInputStream;
import android.annotation.SuppressLint;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Looper;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
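/*
 * Minimal usage sketch (illustration only, not part of the original file): VideoStream is
 * normally driven through a concrete subclass such as H264Stream and a Session, but the
 * expected call order on this class is roughly:
 *
 *   VideoStream stream = new H264Stream(CameraInfo.CAMERA_FACING_BACK);
 *   stream.setSurfaceView(surfaceView);                       // preview / encoder surface
 *   stream.setVideoQuality(VideoQuality.DEFAULT_VIDEO_QUALITY.clone());
 *   stream.setPreferences(preferences);                       // caches SPS/PPS and measured fps
 *   stream.configure();                                       // must precede getSessionDescription()
 *   String sdp = stream.getSessionDescription();
 *   stream.start();
 *   ...
 *   stream.stop();
 */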
/**
* Don't use this class directly.
*/
public abstract class VideoStream extends MediaStream {
protected final static String TAG = "VideoStream";
protected VideoQuality mRequestedQuality = VideoQuality.DEFAULT_VIDEO_QUALITY.clone();
protected VideoQuality mQuality = mRequestedQuality.clone();
protected SurfaceHolder.Callback mSurfaceHolderCallback = null;
protected SurfaceView mSurfaceView = null;
protected SharedPreferences mSettings = null;
protected int mVideoEncoder, mCameraId = 0;
protected int mRequestedOrientation = 0, mOrientation = 0;
protected Camera mCamera;
protected Thread mCameraThread;
protected Looper mCameraLooper;
protected boolean mCameraOpenedManually = true;
protected boolean mFlashEnabled = false;
protected boolean mSurfaceReady = false;
protected boolean mUnlocked = false;
protected boolean mPreviewStarted = false;
protected boolean mUpdated = false;
protected String mMimeType;
protected String mEncoderName;
protected int mEncoderColorFormat;
protected int mCameraImageFormat;
protected int mMaxFps = 0;
/**
* Don't use this class directly.
* Uses CAMERA_FACING_BACK by default.
*/
public VideoStream() {
this(CameraInfo.CAMERA_FACING_BACK);
}
/**
* Don't use this class directly.
* @param camera Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
*/
@SuppressLint("InlinedApi")
public VideoStream(int camera) {
super();
setCamera(camera);
}
/**
* Sets the camera that will be used to capture video.
* You can call this method at any time and changes will take effect next time you start the stream.
* @param camera Can be either CameraInfo.CAMERA_FACING_BACK or CameraInfo.CAMERA_FACING_FRONT
*/
public void setCamera(int camera) {
CameraInfo cameraInfo = new CameraInfo();
int numberOfCameras = Camera.getNumberOfCameras();
for (int i=0;i<numberOfCameras;i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == camera) {
mCameraId = i;
break;
}
}
}
/** Switch between the front facing and the back facing camera of the phone.
* If {@link #startPreview()} has been called, the preview will be briefly interrupted.
* If {@link #start()} has been called, the stream will be briefly interrupted.
* You should not call this method from the main thread if you are already streaming.
* @throws IOException
* @throws RuntimeException
**/
public void switchCamera() throws RuntimeException, IOException {
if (Camera.getNumberOfCameras() == 1) throw new IllegalStateException("Phone only has one camera !");
boolean streaming = mStreaming;
boolean previewing = mCamera!=null && mCameraOpenedManually;
mCameraId = (mCameraId == CameraInfo.CAMERA_FACING_BACK) ? CameraInfo.CAMERA_FACING_FRONT : CameraInfo.CAMERA_FACING_BACK;
setCamera(mCameraId);
stopPreview();
mFlashEnabled = false;
if (previewing) startPreview();
if (streaming) start();
}
/**
* Returns the id of the camera currently selected.
* Can be either {@link CameraInfo#CAMERA_FACING_BACK} or
* {@link CameraInfo#CAMERA_FACING_FRONT}.
*/
public int getCamera() {
return mCameraId;
}
/**
* Sets a Surface to show a preview of recorded media (video).
* You can call this method at any time and changes will take effect next time you call {@link #start()}.
*/
public synchronized void setSurfaceView(SurfaceView view) {
mSurfaceView = view;
if (mSurfaceHolderCallback != null && mSurfaceView != null && mSurfaceView.getHolder() != null) {
mSurfaceView.getHolder().removeCallback(mSurfaceHolderCallback);
}
if (mSurfaceView.getHolder() != null) {
mSurfaceHolderCallback = new Callback() {
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mSurfaceReady = false;
stopPreview();
Log.d(TAG,"Surface destroyed !");
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mSurfaceReady = true;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG,"Surface Changed !");
}
};
mSurfaceView.getHolder().addCallback(mSurfaceHolderCallback);
mSurfaceReady = true;
}
}
/** Turns the LED on or off if the phone has one. */
public synchronized void setFlashState(boolean state) {
// If the camera has already been opened, we apply the change immediately
if (mCamera != null) {
if (mStreaming && mMode == MODE_MEDIARECORDER_API) {
lockCamera();
}
Parameters parameters = mCamera.getParameters();
// We test if the phone has a flash
if (parameters.getFlashMode()==null) {
// The phone has no flash, or the chosen camera cannot toggle the flash
throw new RuntimeException("Can't turn the flash on !");
} else {
parameters.setFlashMode(state?Parameters.FLASH_MODE_TORCH:Parameters.FLASH_MODE_OFF);
try {
mCamera.setParameters(parameters);
mFlashEnabled = state;
} catch (RuntimeException e) {
mFlashEnabled = false;
throw new RuntimeException("Can't turn the flash on !");
} finally {
if (mStreaming && mMode == MODE_MEDIARECORDER_API) {
unlockCamera();
}
}
}
} else {
mFlashEnabled = state;
}
}
/**
* Toggles the LED of the phone if it has one.
* You can get the current state of the flash with {@link VideoStream#getFlashState()}.
*/
public synchronized void toggleFlash() {
setFlashState(!mFlashEnabled);
}
/** Indicates whether or not the flash of the phone is on. */
public boolean getFlashState() {
return mFlashEnabled;
}
/**
* Sets the orientation of the preview.
* @param orientation The orientation of the preview
*/
public void setPreviewOrientation(int orientation) {
mRequestedOrientation = orientation;
mUpdated = false;
}
/**
* Sets the configuration of the stream. You can call this method at any time
* and changes will take effect next time you call {@link #configure()}.
* @param videoQuality Quality of the stream
*/
public void setVideoQuality(VideoQuality videoQuality) {
if (!mRequestedQuality.equals(videoQuality)) {
mRequestedQuality = videoQuality.clone();
mUpdated = false;
}
}
/**
* Returns the quality of the stream.
*/
public VideoQuality getVideoQuality() {
return mRequestedQuality;
}
/**
* Some data (SPS and PPS params) needs to be stored when {@link #getSessionDescription()} is called
* @param prefs The SharedPreferences that will be used to save SPS and PPS parameters
*/
public void setPreferences(SharedPreferences prefs) {
mSettings = prefs;
}
/**
* Configures the stream. You need to call this before calling {@link #getSessionDescription()}
* to apply your configuration of the stream.
*/
public synchronized void configure() throws IllegalStateException, IOException {
super.configure();
mOrientation = mRequestedOrientation;
}
/**
* Starts the stream.
* This will also open the camera and display the preview
* if {@link #startPreview()} has not already been called.
*/
public synchronized void start() throws IllegalStateException, IOException {
if (!mPreviewStarted) mCameraOpenedManually = false;
super.start();
Log.d(TAG,"Stream configuration: FPS: "+mQuality.framerate+" Width: "+mQuality.resX+" Height: "+mQuality.resY);
}
/** Stops the stream. */
public synchronized void stop() {
if (mCamera != null) {
if (mMode == MODE_MEDIACODEC_API) {
mCamera.setPreviewCallbackWithBuffer(null);
}
if (mMode == MODE_MEDIACODEC_API_2) {
((SurfaceView)mSurfaceView).removeMediaCodecSurface();
}
super.stop();
// We restart the preview if the camera was opened by the user (startPreview); otherwise we release it
if (!mCameraOpenedManually) {
destroyCamera();
} else {
try {
startPreview();
} catch (RuntimeException e) {
e.printStackTrace();
}
}
}
}
public synchronized void startPreview()
throws CameraInUseException,
InvalidSurfaceException,
RuntimeException {
mCameraOpenedManually = true;
if (!mPreviewStarted) {
createCamera();
updateCamera();
}
}
/**
* Stops the preview.
*/
public synchronized void stopPreview() {
mCameraOpenedManually = false;
stop();
}
/**
* Video encoding is done by a MediaRecorder.
*/
protected void encodeWithMediaRecorder() throws IOException, ConfNotSupportedException {
Log.d(TAG,"Video encoded using the MediaRecorder API");
// We need a local socket to forward data output by the camera to the packetizer
createSockets();
// Reopens the camera if needed
destroyCamera();
createCamera();
mCamera.stopPreview();
// The camera must be unlocked before the MediaRecorder can use it
unlockCamera();
try {
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setCamera(mCamera);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mMediaRecorder.setVideoEncoder(mVideoEncoder);
mMediaRecorder.setPreviewDisplay(mSurfaceView.getHolder().getSurface());
mMediaRecorder.setVideoSize(mRequestedQuality.resX,mRequestedQuality.resY);
mMediaRecorder.setVideoFrameRate(mRequestedQuality.framerate);
// The bandwidth actually consumed is often above what was requested
mMediaRecorder.setVideoEncodingBitRate((int)(mRequestedQuality.bitrate*0.8));
// We write the output of the camera to a local socket instead of a file !
// This one little trick makes streaming feasible quite simply: data from the camera
// can then be manipulated at the other end of the socket
mMediaRecorder.setOutputFile(mSender.getFileDescriptor());
mMediaRecorder.prepare();
mMediaRecorder.start();
} catch (Exception e) {
throw new ConfNotSupportedException(e.getMessage());
}
// This will skip the MPEG-4 header; if this step fails we can't stream anything :(
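// Descriptive note: an MP4 stream is a sequence of boxes ("atoms"), each made of a
// 4-byte big-endian size followed by a 4-byte type tag; the encoded frames live in the
// payload of the 'mdat' box. The loop below simply discards everything up to and
// including the 'm','d','a','t' tag so that the packetizer only sees raw encoder output.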
InputStream is = mReceiver.getInputStream();
try {
byte buffer[] = new byte[4];
// Skip all atoms preceding mdat atom
while (!Thread.interrupted()) {
while (is.read() != 'm');
is.read(buffer,0,3);
if (buffer[0] == 'd' && buffer[1] == 'a' && buffer[2] == 't') break;
}
} catch (IOException e) {
Log.e(TAG,"Couldn't skip mp4 header :/");
stop();
throw e;
}
// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
mPacketizer.setInputStream(mReceiver.getInputStream());
mPacketizer.start();
mStreaming = true;
}
/**
* Video encoding is done by a MediaCodec.
*/
protected void encodeWithMediaCodec() throws RuntimeException, IOException {
if (mMode == MODE_MEDIACODEC_API_2) {
// Uses the method MediaCodec.createInputSurface to feed the encoder
encodeWithMediaCodecMethod2();
} else {
// Uses dequeueInputBuffer to feed the encoder
encodeWithMediaCodecMethod1();
}
}
/**
* Video encoding is done by a MediaCodec.
*/
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {
Log.d(TAG,"Video encoded using the MediaCodec API with a buffer");
// Updates the parameters of the camera if needed
createCamera();
updateCamera();
// Estimates the framerate of the camera
measureFramerate();
// Starts the preview if needed
if (!mPreviewStarted) {
try {
mCamera.startPreview();
mPreviewStarted = true;
} catch (RuntimeException e) {
destroyCamera();
throw e;
}
}
EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
final NV21Convertor convertor = debugger.getNV21Convertor();
mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,debugger.getEncoderColorFormat());
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
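// From here on the pipeline is: camera preview callback -> NV21Convertor (NV21 to the
// color format expected by the encoder, as determined by EncoderDebugger) -> MediaCodec
// input buffer, with a timestamp in microseconds. The callback buffers handed to the
// camera below are recycled in the finally block so the preview never runs out of buffers.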
Camera.PreviewCallback callback = new Camera.PreviewCallback() {
long now = System.nanoTime()/1000, oldnow = now, i=0;
ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
oldnow = now;
now = System.nanoTime()/1000;
if (i++>3) {
i = 0;
//Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
}
try {
int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
if (bufferIndex>=0) {
inputBuffers[bufferIndex].clear();
if (data == null) Log.e(TAG,"Symptom of the \"Callback buffer was too small\" problem...");
else convertor.convert(data, inputBuffers[bufferIndex]);
mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
} else {
Log.e(TAG,"No buffer available !");
}
} finally {
mCamera.addCallbackBuffer(data);
}
}
};
for (int i=0;i<10;i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
mCamera.setPreviewCallbackWithBuffer(callback);
// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
mPacketizer.start();
mStreaming = true;
}
/**
* Video encoding is done by a MediaCodec.
* But here we feed the encoder through its input surface (MediaCodec.createInputSurface)
*/
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
Log.d(TAG,"Video encoded using the MediaCodec API with a surface");
// Updates the parameters of the camera if needed
createCamera();
updateCamera();
// Estimates the framerate of the camera
measureFramerate();
EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
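// With COLOR_FormatSurface the encoder is fed through an input Surface instead of byte
// buffers: frames rendered by the SurfaceView's GL thread onto that Surface go straight
// to the codec, so no NV21 conversion or manual buffer copies are needed here.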
Surface surface = mMediaCodec.createInputSurface();
((SurfaceView)mSurfaceView).addMediaCodecSurface(surface);
mMediaCodec.start();
// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
mPacketizer.start();
mStreaming = true;
}
/**
* Returns a description of the stream using SDP.
* This method can only be called after {@link Stream#configure()}.
* @throws IllegalStateException Thrown when {@link Stream#configure()} was not called.
*/
public abstract String getSessionDescription() throws IllegalStateException;
/**
* Opens the camera in a new Looper thread so that the preview callback is not called from the main thread.
* If an exception is thrown in this Looper thread, we bring it back to the calling thread.
* @throws RuntimeException Might happen if another app is already using the camera.
*/
private void openCamera() throws RuntimeException {
final Semaphore lock = new Semaphore(0);
final RuntimeException[] exception = new RuntimeException[1];
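// Camera.open() is called on a dedicated Looper thread so that camera callbacks are
// dispatched there rather than on the caller's thread. The semaphore blocks the caller
// until open() has either succeeded or failed, and any RuntimeException is captured and
// re-thrown below on the calling thread.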
mCameraThread = new Thread(new Runnable() {
@Override
public void run() {
Looper.prepare();
mCameraLooper = Looper.myLooper();
try {
mCamera = Camera.open(mCameraId);
} catch (RuntimeException e) {
exception[0] = e;
} finally {
lock.release();
Looper.loop();
}
}
});
mCameraThread.start();
lock.acquireUninterruptibly();
if (exception[0] != null) throw new CameraInUseException(exception[0].getMessage());
}
protected synchronized void createCamera() throws RuntimeException {
if (mSurfaceView == null)
throw new InvalidSurfaceException("Invalid surface !");
if (mSurfaceView.getHolder() == null || !mSurfaceReady)
throw new InvalidSurfaceException("Invalid surface !");
if (mCamera == null) {
openCamera();
mUpdated = false;
mUnlocked = false;
mCamera.setErrorCallback(new Camera.ErrorCallback() {
@Override
public void onError(int error, Camera camera) {
// On some phones the media server will die when trying to use the front facing camera
// Whether or not this callback gets called really depends on the phone
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// In this case the application must release the camera and instantiate a new one
Log.e(TAG,"Media server died !");
// We don't know in which thread we are, so stop() needs to be synchronized
mCameraOpenedManually = false;
stop();
} else {
Log.e(TAG,"Error unknown with the camera: "+error);
}
}
});
try {
// If the phone has a flash, we turn it on/off according to mFlashEnabled
// setRecordingHint(true) is a very nice optimization if you plan to only use the Camera for recording
Parameters parameters = mCamera.getParameters();
if (parameters.getFlashMode()!=null) {
parameters.setFlashMode(mFlashEnabled?Parameters.FLASH_MODE_TORCH:Parameters.FLASH_MODE_OFF);
}
parameters.setRecordingHint(true);
mCamera.setParameters(parameters);
mCamera.setDisplayOrientation(mOrientation);
try {
if (mMode == MODE_MEDIACODEC_API_2) {
mSurfaceView.startGLThread();
mCamera.setPreviewTexture(mSurfaceView.getSurfaceTexture());
} else {
mCamera.setPreviewDisplay(mSurfaceView.getHolder());
}
} catch (IOException e) {
throw new InvalidSurfaceException("Invalid surface !");
}
} catch (RuntimeException e) {
destroyCamera();
throw e;
}
}
}
protected synchronized void destroyCamera() {
if (mCamera != null) {
if (mStreaming) super.stop();
lockCamera();
mCamera.stopPreview();
try {
mCamera.release();
} catch (Exception e) {
Log.e(TAG,e.getMessage()!=null?e.getMessage():"unknown error");
}
mCamera = null;
mCameraLooper.quit();
mUnlocked = false;
mPreviewStarted = false;
}
}
protected synchronized void updateCamera() throws RuntimeException {
// The camera is already correctly configured
if (mUpdated) return;
if (mPreviewStarted) {
mPreviewStarted = false;
mCamera.stopPreview();
}
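// The quality actually used for streaming (mQuality) is adjusted here to the closest
// preview size and the maximum fps range the camera reports as supported, so it may
// differ from what was requested through setVideoQuality().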
Parameters parameters = mCamera.getParameters();
mQuality = VideoQuality.determineClosestSupportedResolution(parameters, mQuality);
int[] max = VideoQuality.determineMaximumSupportedFramerate(parameters);
double ratio = (double)mQuality.resX/(double)mQuality.resY;
mSurfaceView.requestAspectRatio(ratio);
parameters.setPreviewFormat(mCameraImageFormat);
parameters.setPreviewSize(mQuality.resX, mQuality.resY);
parameters.setPreviewFpsRange(max[0], max[1]);
try {
mCamera.setParameters(parameters);
mCamera.setDisplayOrientation(mOrientation);
mCamera.startPreview();
mPreviewStarted = true;
mUpdated = true;
} catch (RuntimeException e) {
destroyCamera();
throw e;
}
}
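// lockCamera()/unlockCamera() wrap Camera.reconnect()/Camera.unlock(): the camera must be
// unlocked before a MediaRecorder can take over the hardware, and reconnected afterwards
// so that this class can keep calling methods on the Camera object.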
protected void lockCamera() {
if (mUnlocked) {
Log.d(TAG,"Locking camera");
try {
mCamera.reconnect();
} catch (Exception e) {
Log.e(TAG,e.getMessage());
}
mUnlocked = false;
}
}
protected void unlockCamera() {
if (!mUnlocked) {
Log.d(TAG,"Unlocking camera");
try {
mCamera.unlock();
} catch (Exception e) {
Log.e(TAG,e.getMessage());
}
mUnlocked = true;
}
}
/**
* Computes the average frame rate at which the preview callback is called.
* We will then use this average framerate with the MediaCodec.
* Blocks the thread in which this function is called.
*/
private void measureFramerate() {
final Semaphore lock = new Semaphore(0);
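// The first few frames are ignored to let the camera settle, then inter-frame intervals
// are averaged until about 20 frames have been seen (bounded by a two-second timeout
// below). The result overwrites mQuality.framerate and, if SharedPreferences were
// provided, is cached under a key derived from the requested quality, presumably so
// that later sessions can reuse the measurement.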
final Camera.PreviewCallback callback = new Camera.PreviewCallback() {
int i = 0, t = 0;
long now, oldnow, count = 0;
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
i++;
now = System.nanoTime()/1000;
if (i>3) {
t += now - oldnow;
count++;
}
if (i>20) {
mQuality.framerate = (int) (1000000/(t/count)+1);
lock.release();
}
oldnow = now;
}
};
mCamera.setPreviewCallback(callback);
try {
lock.tryAcquire(2,TimeUnit.SECONDS);
Log.d(TAG,"Actual framerate: "+mQuality.framerate);
if (mSettings != null) {
Editor editor = mSettings.edit();
editor.putInt(PREF_PREFIX+"fps"+mRequestedQuality.framerate+","+mCameraImageFormat+","+mRequestedQuality.resX+mRequestedQuality.resY, mQuality.framerate);
editor.commit();
}
} catch (InterruptedException e) {}
mCamera.setPreviewCallback(null);
}
}