diff --git a/README.md b/README.md index 62ffd04..53afac5 100755 --- a/README.md +++ b/README.md @@ -88,13 +88,27 @@ cameraManager.stopVideoRecording({ (videoURL, recordError) -> Void in }) ``` -To zoom in manually: +To zoom manually: ```swift +// Zoom in let zoomScale = CGFloat(2.0) cameraManager.zoom(zoomScale) + +// Zoom out (uses ultra-wide camera on supported devices) +let wideZoomScale = CGFloat(1.0) +cameraManager.zoom(wideZoomScale) ``` +The zoom range is automatically determined by the device's camera capabilities using `minAvailableVideoZoomFactor` and `maxAvailableVideoZoomFactor`. + +**Ultra-Wide Camera Support:** +The library automatically detects and uses multi-camera virtual devices (iPhone 11+) that include ultra-wide lenses: +- iPhone 11 Pro/12 Pro/13 Pro/14 Pro: `.builtInTripleCamera` (ultra-wide + wide + telephoto) +- iPhone 11/12/13/14: `.builtInDualWideCamera` (ultra-wide + wide) + +On these devices, setting zoom to 1.0 automatically uses the ultra-wide lens. iOS handles the camera switching seamlessly as you zoom in and out. + ### Properties You can set input device to front or back camera. `(Default: .Back)` diff --git a/Sources/CameraManager.swift b/Sources/CameraManager.swift index a568cea..4844798 100644 --- a/Sources/CameraManager.swift +++ b/Sources/CameraManager.swift @@ -260,7 +260,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest _updateCameraDevice(cameraDevice) _updateIlluminationMode(flashMode) _setupMaxZoomScale() - _zoom(0) + _zoom(1) _orientationChanged() } } @@ -292,7 +292,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest _setupOutputMode(cameraOutputMode, oldCameraOutputMode: oldValue) } _setupMaxZoomScale() - _zoom(0) + _zoom(1) } } } @@ -348,14 +348,34 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest }() fileprivate lazy var backCameraDevice: AVCaptureDevice? 
= { - AVCaptureDevice.videoDevices.filter { $0.position == .back }.first + // Try to get multi-camera virtual devices first (supports ultra-wide) + var deviceTypes: [AVCaptureDevice.DeviceType] = [.builtInWideAngleCamera] + + if #available(iOS 13.0, *) { + // Prefer virtual devices that support multiple cameras including ultra-wide + deviceTypes = [ + .builtInTripleCamera, // iPhone 11 Pro, 12 Pro, 13 Pro, 14 Pro, etc. + .builtInDualWideCamera, // iPhone 11, 13, 14 (wide + ultra-wide) + .builtInDualCamera, // iPhone 7 Plus, 8 Plus, X, XS (wide + telephoto) + .builtInWideAngleCamera // Fallback for single camera devices + ] + } + + let discoverySession = AVCaptureDevice.DiscoverySession( + deviceTypes: deviceTypes, + mediaType: .video, + position: .back + ) + + return discoverySession.devices.first }() fileprivate lazy var mic: AVCaptureDevice? = { AVCaptureDevice.default(for: AVMediaType.audio) }() - fileprivate var stillImageOutput: AVCaptureStillImageOutput? + fileprivate var photoOutput: AVCapturePhotoOutput? + fileprivate var photoCaptureCompletion: ((CaptureResult) -> Void)? fileprivate var movieOutput: AVCaptureMovieFileOutput? fileprivate var previewLayer: AVCaptureVideoPreviewLayer? fileprivate var library: PHPhotoLibrary? 
@@ -366,6 +386,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate var zoomScale = CGFloat(1.0) fileprivate var beginZoomScale = CGFloat(1.0) fileprivate var maxZoomScale = CGFloat(1.0) + fileprivate var minZoomScale = CGFloat(1.0) fileprivate func _tempFilePath() -> URL { let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tempMovie\(Date().timeIntervalSince1970)").appendingPathExtension("mp4") @@ -498,7 +519,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest frontCameraDevice = nil backCameraDevice = nil mic = nil - stillImageOutput = nil + photoOutput = nil + photoCaptureCompletion = nil movieOutput = nil animateCameraDeviceChange = oldAnimationValue } @@ -688,39 +710,27 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest return } - _updateIlluminationMode(flashMode) - sessionQueue.async { - let stillImageOutput = self._getStillImageOutput() - if let connection = stillImageOutput.connection(with: AVMediaType.video), - connection.isEnabled { - if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported, - self.shouldFlipFrontCameraImage { - connection.isVideoMirrored = true - } - if connection.isVideoOrientationSupported { - connection.videoOrientation = self._currentCaptureVideoOrientation() - } - - stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { [weak self] sample, error in - - if let error = error { - self?._show(NSLocalizedString("Error", comment: ""), message: error.localizedDescription) - imageCompletion(.failure(error)) - return - } - - guard let sample = sample else { imageCompletion(.failure(CaptureError.noSampleBuffer)); return } - if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sample) { - imageCompletion(CaptureResult(imageData)) - } else { - imageCompletion(.failure(CaptureError.noImageData)) - } - - }) - } else { + 
let photoOutput = self._getPhotoOutput() + guard let connection = photoOutput.connection(with: AVMediaType.video), connection.isEnabled else { imageCompletion(.failure(CaptureError.noVideoConnection)) + return + } + if self.cameraDevice == CameraDevice.front, connection.isVideoMirroringSupported, + self.shouldFlipFrontCameraImage { + connection.isVideoMirrored = true + } + if connection.isVideoOrientationSupported { + connection.videoOrientation = self._currentCaptureVideoOrientation() + } + + let settings = AVCapturePhotoSettings() + if self.cameraDevice == .back, photoOutput.supportedFlashModes.contains(AVCaptureDevice.FlashMode(rawValue: self.flashMode.rawValue) ?? .off) { + settings.flashMode = AVCaptureDevice.FlashMode(rawValue: self.flashMode.rawValue) ?? .off } + + self.photoCaptureCompletion = imageCompletion + photoOutput.capturePhoto(with: settings, delegate: self) } } @@ -740,7 +750,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest /** Starts recording a video with or without voice as in the session preset. */ - open func startRecordingVideo() { + open func startRecordingVideo(toURL url: URL? = nil) { guard cameraOutputMode != .stillImage else { _show(NSLocalizedString("Capture session output still image", comment: ""), message: NSLocalizedString("I can only take pictures", comment: "")) return @@ -772,7 +782,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest _updateIlluminationMode(flashMode) - videoOutput.startRecording(to: _tempFilePath(), recordingDelegate: self) + videoOutput.startRecording(to: url ?? 
_tempFilePath(), recordingDelegate: self) } /** @@ -971,7 +981,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest let captureDevice = device try captureDevice?.lockForConfiguration() - zoomScale = max(1.0, min(beginZoomScale * scale, maxZoomScale)) + zoomScale = max(minZoomScale, min(beginZoomScale * scale, maxZoomScale)) captureDevice?.videoZoomFactor = zoomScale @@ -1283,20 +1293,20 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } } - fileprivate func _getStillImageOutput() -> AVCaptureStillImageOutput { - if let stillImageOutput = stillImageOutput, let connection = stillImageOutput.connection(with: AVMediaType.video), + fileprivate func _getPhotoOutput() -> AVCapturePhotoOutput { + if let photoOutput = photoOutput, let connection = photoOutput.connection(with: AVMediaType.video), connection.isActive { - return stillImageOutput + return photoOutput } - let newStillImageOutput = AVCaptureStillImageOutput() - stillImageOutput = newStillImageOutput + let newPhotoOutput = AVCapturePhotoOutput() + photoOutput = newPhotoOutput if let captureSession = captureSession, - captureSession.canAddOutput(newStillImageOutput) { + captureSession.canAddOutput(newPhotoOutput) { captureSession.beginConfiguration() - captureSession.addOutput(newStillImageOutput) + captureSession.addOutput(newPhotoOutput) captureSession.commitConfiguration() } - return newStillImageOutput + return newPhotoOutput } @objc fileprivate func _orientationChanged() { @@ -1304,7 +1314,7 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest switch cameraOutputMode { case .stillImage: - currentConnection = stillImageOutput?.connection(with: AVMediaType.video) + currentConnection = photoOutput?.connection(with: AVMediaType.video) case .videoOnly, .videoWithMic: currentConnection = _getMovieOutput().connection(with: AVMediaType.video) if let location = locationManager?.latestLocation { @@ -1541,15 +1551,41 @@ 
open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _setupMaxZoomScale() { var maxZoom = CGFloat(1.0) + var minZoom = CGFloat(1.0) + var defaultZoom = CGFloat(1.0) beginZoomScale = CGFloat(1.0) - + if cameraDevice == .back, let backCameraDevice = backCameraDevice { maxZoom = backCameraDevice.activeFormat.videoMaxZoomFactor + minZoom = backCameraDevice.minAvailableVideoZoomFactor + + // For multi-camera devices, start at the first switchover point (main wide-angle camera) + // This represents "1x" zoom in the Camera app + if #available(iOS 13.0, *), + let switchOverFactors = backCameraDevice.virtualDeviceSwitchOverVideoZoomFactors as? [CGFloat], + let firstSwitchOver = switchOverFactors.first { + defaultZoom = firstSwitchOver + } else { + defaultZoom = minZoom + } } else if cameraDevice == .front, let frontCameraDevice = frontCameraDevice { maxZoom = frontCameraDevice.activeFormat.videoMaxZoomFactor + minZoom = frontCameraDevice.minAvailableVideoZoomFactor + + // For multi-camera front devices, start at the first switchover point + if #available(iOS 13.0, *), + let switchOverFactors = frontCameraDevice.virtualDeviceSwitchOverVideoZoomFactors as? 
[CGFloat], + let firstSwitchOver = switchOverFactors.first { + defaultZoom = firstSwitchOver + } else { + defaultZoom = minZoom + } } - + maxZoomScale = maxZoom + minZoomScale = minZoom + zoomScale = defaultZoom + beginZoomScale = defaultZoom } fileprivate func _checkIfCameraIsAvailable() -> CameraState { @@ -1578,8 +1614,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest // remove current setting switch cameraOutputToRemove { case .stillImage: - if let validStillImageOutput = stillImageOutput { - captureSession?.removeOutput(validStillImageOutput) + if let validPhotoOutput = photoOutput { + captureSession?.removeOutput(validPhotoOutput) } case .videoOnly, .videoWithMic: if let validMovieOutput = movieOutput { @@ -1596,10 +1632,10 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest // configure new devices switch newCameraOutputMode { case .stillImage: - let validStillImageOutput = _getStillImageOutput() + let validPhotoOutput = _getPhotoOutput() if let captureSession = captureSession, - captureSession.canAddOutput(validStillImageOutput) { - captureSession.addOutput(validStillImageOutput) + captureSession.canAddOutput(validPhotoOutput) { + captureSession.addOutput(validPhotoOutput) } case .videoOnly, .videoWithMic: let videoMovieOutput = _getMovieOutput() @@ -1619,8 +1655,8 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } fileprivate func _setupOutputs() { - if stillImageOutput == nil { - stillImageOutput = AVCaptureStillImageOutput() + if photoOutput == nil { + photoOutput = AVCapturePhotoOutput() } if movieOutput == nil { movieOutput = _getMovieOutput() @@ -1822,11 +1858,10 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest fileprivate func _updateIlluminationMode(_ mode: CameraFlashMode) { if cameraOutputMode != .stillImage { _updateTorch(mode) - } else { - _updateFlash(mode) } + // For stillImage, flash is applied via 
AVCapturePhotoSettings at capture time } - + fileprivate func _updateTorch(_: CameraFlashMode) { captureSession?.beginConfiguration() defer { captureSession?.commitConfiguration() } @@ -1846,22 +1881,6 @@ open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGest } } - fileprivate func _updateFlash(_ flashMode: CameraFlashMode) { - captureSession?.beginConfiguration() - defer { captureSession?.commitConfiguration() } - for captureDevice in AVCaptureDevice.videoDevices { - guard let avFlashMode = AVCaptureDevice.FlashMode(rawValue: flashMode.rawValue) else { continue } - if captureDevice.isFlashModeSupported(avFlashMode) { - do { - try captureDevice.lockForConfiguration() - captureDevice.flashMode = avFlashMode - captureDevice.unlockForConfiguration() - } catch { - return - } - } - } - } fileprivate func _performShutterAnimation(_ completion: (() -> Void)?) { if let validPreviewLayer = previewLayer { @@ -2117,6 +2136,22 @@ extension PHPhotoLibrary { } } +extension CameraManager: AVCapturePhotoCaptureDelegate { + public func photoOutput(_: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) { + let completion = photoCaptureCompletion + photoCaptureCompletion = nil + if let error = error { + completion?(.failure(error)) + return + } + guard let imageData = photo.fileDataRepresentation() else { + completion?(.failure(CaptureError.noImageData)) + return + } + completion?(CaptureResult(imageData)) + } +} + extension CameraManager: AVCaptureMetadataOutputObjectsDelegate { /** Called when a QR code is detected.