File: ARDCaptureController.m

/*
 *  Copyright 2017 The WebRTC Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#import "ARDCaptureController.h"

#import "sdk/objc/base/RTCLogging.h"

#import "ARDSettingsModel.h"

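// Upper bound on the frame rate requested from the capture device.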
const Float64 kFramerateLimit = 30.0;

@implementation ARDCaptureController {
  RTC_OBJC_TYPE(RTCCameraVideoCapturer) * _capturer;
  ARDSettingsModel *_settings;
  BOOL _usingFrontCamera;
}

- (instancetype)initWithCapturer:
                    (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
                        settings:(ARDSettingsModel *)settings {
  self = [super init];
  if (self) {
    _capturer = capturer;
    _settings = settings;
    _usingFrontCamera = YES;
  }
  return self;
}

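// Convenience entry point: starts capture without a completion handler.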
- (void)startCapture {
  [self startCapture:nil];
}

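// Starts (or restarts) capture on the camera at the current position, using
// the closest supported format and a frame rate capped at kFramerateLimit.
// The completion handler, if any, is forwarded to the capturer.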
- (void)startCapture:(void (^)(NSError *))completion {
  AVCaptureDevicePosition position = _usingFrontCamera ?
      AVCaptureDevicePositionFront :
      AVCaptureDevicePositionBack;
  AVCaptureDevice *device = [self findDeviceForPosition:position];
  AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];

  if (format == nil) {
    RTCLogError(@"No valid formats for device %@", device);
    NSAssert(NO, @"");

    return;
  }

  NSInteger fps = [self selectFpsForFormat:format];

  [_capturer startCaptureWithDevice:device
                             format:format
                                fps:fps
                  completionHandler:completion];
}

- (void)stopCapture {
  [_capturer stopCapture];
}

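// Toggles between the front and back cameras and restarts capture.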
- (void)switchCamera {
  _usingFrontCamera = !_usingFrontCamera;
  [self startCapture:nil];
}

- (void)switchCamera:(void (^)(NSError *))completion {
  _usingFrontCamera = !_usingFrontCamera;
  [self startCapture:completion];
}

#pragma mark - Private

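// Returns the first device at the requested position, or the first available
// device if none matches.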
- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
  NSArray<AVCaptureDevice *> *captureDevices =
      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
  for (AVCaptureDevice *device in captureDevices) {
    if (device.position == position) {
      return device;
    }
  }
  return captureDevices[0];
}

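// Selects the supported format whose dimensions are closest to the resolution
// stored in the settings model; on ties, prefers the capturer's preferred
// output pixel format.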
- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
  NSArray<AVCaptureDeviceFormat *> *formats =
      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device];
  int targetWidth = [_settings currentVideoResolutionWidthFromStore];
  int targetHeight = [_settings currentVideoResolutionHeightFromStore];
  AVCaptureDeviceFormat *selectedFormat = nil;
  int currentDiff = INT_MAX;

  for (AVCaptureDeviceFormat *format in formats) {
    CMVideoDimensions dimension =
        CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    FourCharCode pixelFormat =
        CMFormatDescriptionGetMediaSubType(format.formatDescription);
    int diff = abs(targetWidth - dimension.width) +
        abs(targetHeight - dimension.height);
    if (diff < currentDiff) {
      selectedFormat = format;
      currentDiff = diff;
    } else if (diff == currentDiff &&
               pixelFormat == [_capturer preferredOutputPixelFormat]) {
      selectedFormat = format;
    }
  }

  return selectedFormat;
}

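// Returns the highest frame rate supported by the format, capped at
// kFramerateLimit.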
- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format {
  Float64 maxSupportedFramerate = 0;
  for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) {
    maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
  }
  return fmin(maxSupportedFramerate, kFramerateLimit);
}

@end
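
// Usage sketch (illustrative only, not part of this file): the capturer and
// settings objects are assumed to be created elsewhere in the app that uses
// this controller.
//
//   RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer = ...;  // from the video source setup
//   ARDSettingsModel *settings = [[ARDSettingsModel alloc] init];
//   ARDCaptureController *captureController =
//       [[ARDCaptureController alloc] initWithCapturer:capturer settings:settings];
//   [captureController startCapture:^(NSError *error) {
//     if (error) {
//       RTCLogError(@"Failed to start capture: %@", error);
//     }
//   }];
//   ...
//   [captureController stopCapture];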