From 574ce44aa89b03a297cdf9f54d368a95ffcba875 Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Mon, 28 Apr 2025 17:38:24 +0200 Subject: [PATCH 01/11] Migrate FLTCam to Swift --- .../camera/camera_avfoundation/CHANGELOG.md | 4 + .../CameraPluginDelegatingMethodTests.swift | 2 +- .../ios/RunnerTests/CameraTestUtils.swift | 15 +- .../FLTCamSetDeviceOrientationTests.swift | 2 +- .../ios/RunnerTests/Mocks/MockFLTCam.swift | 214 +-- .../ios/RunnerTests/PhotoCaptureTests.swift | 38 +- .../ios/RunnerTests/SampleBufferTests.swift | 51 +- .../ios/RunnerTests/StreamingTests.swift | 11 +- .../camera_avfoundation/CameraPlugin.swift | 18 +- .../Sources/camera_avfoundation/FLTCam.swift | 1547 +++++++++++++++++ .../FLTImageStreamHandler.swift | 36 + .../camera_avfoundation/QueueUtils.swift | 7 + .../Sources/camera_avfoundation_objc/FLTCam.m | 1441 --------------- .../FLTCamConfiguration.m | 2 + .../camera_avfoundation_objc/QueueUtils.m | 6 - .../include/camera_avfoundation/FLTCam.h | 109 -- .../camera_avfoundation/FLTCamConfiguration.h | 5 +- .../include/camera_avfoundation/FLTCam_Test.h | 56 - .../include/camera_avfoundation/QueueUtils.h | 12 - .../camera_avfoundation/camera_avfoundation.h | 1 - .../camera/camera_avfoundation/pubspec.yaml | 2 +- 21 files changed, 1783 insertions(+), 1796 deletions(-) create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index 51ad42e38854..c90b8d6e45f8 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.9.19+1 + +* Migrates the FLTCam class to Swift. + ## 0.9.19 * Migrates the CameraPlugin class to Swift. 
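The core of this migration replaces the former concrete `FLTCam` class with an `FLTCam` protocol plus an `FLTDefaultCam` implementation, so the tests below can substitute lightweight protocol conformances instead of subclassing and overriding. A minimal sketch of that seam, using illustrative names rather than the plugin's actual API:

import Foundation

// Production code depends on the protocol, never on the concrete class.
protocol Camera: AnyObject {
  func start()
  func lockCaptureOrientation(_ orientation: String)
}

// The real implementation drives AVFoundation.
final class DefaultCamera: NSObject, Camera {
  func start() { /* start the capture session */ }
  func lockCaptureOrientation(_ orientation: String) { /* lock the connection */ }
}

// The test double conforms with per-method stubs; no subclassing required.
final class MockCamera: NSObject, Camera {
  var startStub: (() -> Void)?
  var lockCaptureOrientationStub: ((String) -> Void)?
  func start() { startStub?() }
  func lockCaptureOrientation(_ orientation: String) {
    lockCaptureOrientationStub?(orientation)
  }
}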
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift index 0bf445a131bf..27c8d44d4193 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift @@ -39,7 +39,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let targetOrientation = FCPPlatformDeviceOrientation.landscapeLeft var lockCaptureCalled = false - mockCamera.lockCaptureStub = { orientation in + mockCamera.lockCaptureOrientationStub = { orientation in XCTAssertEqual(orientation, targetOrientation) lockCaptureCalled = true } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift index b624c8d56920..12d8d8333e05 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift @@ -6,9 +6,9 @@ import XCTest // Import Objective-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - import camera_avfoundation_objc + @testable import camera_avfoundation_objc #else - import camera_avfoundation + @testable import camera_avfoundation #endif /// Utils for creating default class instances used in tests @@ -61,6 +61,7 @@ enum CameraTestUtils { resolutionPreset: FCPPlatformResolutionPreset.medium), mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper(), captureDeviceFactory: { _ in captureDeviceMock }, + audioCaptureDeviceFactory: { MockCaptureDevice() }, captureSessionFactory: { videoSessionMock }, captureSessionQueue: captureSessionQueue, captureDeviceInputFactory: MockCaptureDeviceInputFactory(), @@ -80,16 +81,18 @@ enum CameraTestUtils { return configuration } - static func createTestCamera(_ configuration: FLTCamConfiguration) -> FLTCam { - return FLTCam(configuration: configuration, error: nil) + static func createTestCamera(_ configuration: FLTCamConfiguration) -> FLTDefaultCam { + let camera = try? FLTDefaultCam(configuration: configuration) + + return camera!
} - static func createTestCamera() -> FLTCam { + static func createTestCamera() -> FLTDefaultCam { return createTestCamera(createTestCameraConfiguration()) } static func createCameraWithCaptureSessionQueue(_ captureSessionQueue: DispatchQueue) - -> FLTCam + -> FLTDefaultCam { let configuration = createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift index b0baa8ccc83b..fd24d23798e8 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift @@ -73,7 +73,7 @@ final class FLTCamSetDeviceOrientationTests: XCTestCase { videoSetVideoOrientationCalled = true } - camera.lockCapture(FCPPlatformDeviceOrientation.portraitDown) + camera.lockCaptureOrientation(FCPPlatformDeviceOrientation.portraitDown) camera.setDeviceOrientation(.landscapeLeft) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift index c4ca7ad9f9f9..0904a188979c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift @@ -2,207 +2,207 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import camera_avfoundation +@testable import camera_avfoundation // Import Objective-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - import camera_avfoundation_objc + @testable import camera_avfoundation_objc #endif -final class MockFLTCam: FLTCam { - var setOnFrameAvailableStub: ((() -> Void) -> Void)? - var setDartApiStub: ((FCPCameraEventApi) -> Void)? - var setFocusModeStub: ((FCPPlatformFocusMode) -> Void)? - var getMinimumAvailableZoomFactorStub: (() -> CGFloat)? - var getMaximumAvailableZoomFactorStub: (() -> CGFloat)? +final class MockFLTCam: NSObject, FLTCam { + var setDartApiStub: ((FCPCameraEventApi?) -> Void)? + var setOnFrameAvailableStub: (((() -> Void)?) -> Void)? var getMinimumExposureOffsetStub: (() -> CGFloat)? var getMaximumExposureOffsetStub: (() -> CGFloat)? - + var getMinimumAvailableZoomFactorStub: (() -> CGFloat)? + var getMaximumAvailableZoomFactorStub: (() -> CGFloat)? + var setUpCaptureSessionForAudioIfNeededStub: (() -> Void)? + var receivedImageStreamDataStub: (() -> Void)? var startStub: (() -> Void)? - var setDeviceOrientationStub: ((UIDeviceOrientation) -> Void)? - var captureToFileStub: ((((String?, FlutterError?) -> Void)?) -> Void)? - var setImageFileFormatStub: ((FCPPlatformImageFileFormat) -> Void)? var startVideoRecordingStub: ((@escaping (FlutterError?) -> Void, FlutterBinaryMessenger?) -> Void)? - var stopVideoRecordingStub: ((((String?, FlutterError?) -> Void)?) -> Void)? var pauseVideoRecordingStub: (() -> Void)? var resumeVideoRecordingStub: (() -> Void)? - var lockCaptureStub: ((FCPPlatformDeviceOrientation) -> Void)? + var stopVideoRecordingStub: ((((String?, FlutterError?) -> Void)?) -> Void)? + var captureToFileStub: ((((String?, FlutterError?) -> Void)?) -> Void)? + var setDeviceOrientationStub: ((UIDeviceOrientation) -> Void)?
+ var lockCaptureOrientationStub: ((FCPPlatformDeviceOrientation) -> Void)? var unlockCaptureOrientationStub: (() -> Void)? - var setFlashModeStub: ((FCPPlatformFlashMode, ((FlutterError?) -> Void)?) -> Void)? + var setImageFileFormatStub: ((FCPPlatformImageFileFormat) -> Void)? var setExposureModeStub: ((FCPPlatformExposureMode) -> Void)? - var receivedImageStreamDataStub: (() -> Void)? + var setExposureOffsetStub: ((Double) -> Void)? + var setExposurePointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? + var setFocusModeStub: ((FCPPlatformFocusMode) -> Void)? + var setFocusPointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? + var setZoomLevelStub: ((CGFloat, ((FlutterError?) -> Void)?) -> Void)? + var setFlashModeStub: ((FCPPlatformFlashMode, ((FlutterError?) -> Void)?) -> Void)? var pausePreviewStub: (() -> Void)? var resumePreviewStub: (() -> Void)? var setDescriptionWhileRecordingStub: ((String, ((FlutterError?) -> Void)?) -> Void)? - var setExposurePointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? - var setFocusPointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? - var setExposureOffsetStub: ((Double) -> Void)? var startImageStreamStub: ((FlutterBinaryMessenger) -> Void)? var stopImageStreamStub: (() -> Void)? - var setZoomLevelStub: ((CGFloat, ((FlutterError?) -> Void)?) -> Void)? - var setUpCaptureSessionForAudioIfNeededStub: (() -> Void)? - - override var onFrameAvailable: (() -> Void) { - get { - return super.onFrameAvailable - } - set { - setOnFrameAvailableStub?(newValue) - } - } - override var dartAPI: FCPCameraEventApi { + var dartAPI: FCPCameraEventApi? { get { - return super.dartAPI + preconditionFailure("Attempted to access unimplemented property: dartAPI") } set { setDartApiStub?(newValue) } } - override var minimumAvailableZoomFactor: CGFloat { + var onFrameAvailable: (() -> Void)? { get { - return getMinimumAvailableZoomFactorStub?() ?? super.minimumAvailableZoomFactor + preconditionFailure("Attempted to access unimplemented property: onFrameAvailable") } set { - super.minimumAvailableZoomFactor = newValue + setOnFrameAvailableStub?(newValue) } } - override var maximumAvailableZoomFactor: CGFloat { - get { - return getMaximumAvailableZoomFactorStub?() ?? super.maximumAvailableZoomFactor - } - set { - super.maximumAvailableZoomFactor = newValue - } + var videoFormat: FourCharCode = kCVPixelFormatType_32BGRA + + var isPreviewPaused: Bool = false + + var minimumExposureOffset: CGFloat { + return getMinimumExposureOffsetStub?() ?? 0 } - override var minimumExposureOffset: CGFloat { - get { - return getMinimumExposureOffsetStub?() ?? super.minimumExposureOffset - } - set { - super.minimumExposureOffset = newValue - } + var maximumExposureOffset: CGFloat { + return getMaximumExposureOffsetStub?() ?? 0 } - override var maximumExposureOffset: CGFloat { - get { - return getMaximumExposureOffsetStub?() ?? super.maximumExposureOffset - } - set { - super.maximumExposureOffset = newValue - } + var minimumAvailableZoomFactor: CGFloat { + return getMinimumAvailableZoomFactorStub?() ?? 0 } - override func start() { - startStub?() + var maximumAvailableZoomFactor: CGFloat { + return getMaximumAvailableZoomFactorStub?() ?? 0 } - override func setDeviceOrientation(_ orientation: UIDeviceOrientation) { - setDeviceOrientationStub?(orientation) + func setUpCaptureSessionForAudioIfNeeded() { + setUpCaptureSessionForAudioIfNeededStub?() } - override func captureToFile(completion: @escaping (String?, FlutterError?) 
-> Void) { - captureToFileStub?(completion) + func reportInitializationState() {} + + func receivedImageStreamData() { + receivedImageStreamDataStub?() } - override func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { - setImageFileFormatStub?(fileFormat) + func start() { + startStub?() } - override func startVideoRecording( + func stop() {} + + func startVideoRecording( completion: @escaping (FlutterError?) -> Void, messengerForStreaming messenger: FlutterBinaryMessenger? ) { startVideoRecordingStub?(completion, messenger) } - override func stopVideoRecording(completion: ((String?, FlutterError?) -> Void)?) { + func pauseVideoRecording() { + pauseVideoRecordingStub?() + } + + func resumeVideoRecording() { + resumeVideoRecordingStub?() + } + + func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { stopVideoRecordingStub?(completion) } - override func pauseVideoRecording() { - pauseVideoRecordingStub?() + func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { + captureToFileStub?(completion) } - override func resumeVideoRecording() { - resumeVideoRecordingStub?() + func setDeviceOrientation(_ orientation: UIDeviceOrientation) { + setDeviceOrientationStub?(orientation) } - override func lockCapture(_ orientation: FCPPlatformDeviceOrientation) { - lockCaptureStub?(orientation) + func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) { + lockCaptureOrientationStub?(orientation) } - override func unlockCaptureOrientation() { + func unlockCaptureOrientation() { unlockCaptureOrientationStub?() } - override func setExposureMode(_ mode: FCPPlatformExposureMode) { + func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { + setImageFileFormatStub?(fileFormat) + } + + func setExposureMode(_ mode: FCPPlatformExposureMode) { setExposureModeStub?(mode) } - override func setFocusMode(_ mode: FCPPlatformFocusMode) { - setFocusModeStub?(mode) + func setExposureOffset(_ offset: Double) { + setExposureOffsetStub?(offset) } - override func setFlashMode( - _ mode: FCPPlatformFlashMode, withCompletion completion: @escaping (FlutterError?) -> Void + func setExposurePoint( + _ point: FCPPlatformPoint?, withCompletion: @escaping (FlutterError?) -> Void ) { - setFlashModeStub?(mode, completion) + setExposurePointStub?(point, withCompletion) } - override func receivedImageStreamData() { - receivedImageStreamDataStub?() + func setFocusMode(_ mode: FCPPlatformFocusMode) { + setFocusModeStub?(mode) } - override func pausePreview() { - pausePreviewStub?() + func setFocusPoint(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) { + setFocusPointStub?(point, completion) } - override func resumePreview() { - resumePreviewStub?() + func setZoomLevel( + _ zoom: CGFloat, + withCompletion completion: @escaping (FlutterError?) -> Void + ) { + setZoomLevelStub?(zoom, completion) } - override func setDescriptionWhileRecording( - _ cameraName: String, withCompletion completion: @escaping (FlutterError?) -> Void + func setFlashMode( + _ mode: FCPPlatformFlashMode, + withCompletion completion: @escaping (FlutterError?) -> Void ) { - setDescriptionWhileRecordingStub?(cameraName, completion) + setFlashModeStub?(mode, completion) } - override func setExposurePoint( - _ point: FCPPlatformPoint?, withCompletion completion: ((FlutterError?) -> Void)? 
- ) { - setExposurePointStub?(point, completion) + func pausePreview() { + pausePreviewStub?() } - override func setFocusPoint( - _ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void - ) { - setFocusPointStub?(point, completion) + func resumePreview() { + resumePreviewStub?() } - override func setExposureOffset(_ offset: Double) { - setExposureOffsetStub?(offset) + func setDescriptionWhileRecording( + _ cameraName: String, + withCompletion completion: @escaping (FlutterError?) -> Void + ) { + setDescriptionWhileRecordingStub?(cameraName, completion) } - override func startImageStream(with messenger: FlutterBinaryMessenger) { + func startImageStream(with messenger: FlutterBinaryMessenger) { startImageStreamStub?(messenger) } - override func stopImageStream() { + func stopImageStream() { stopImageStreamStub?() } - override func setZoomLevel( - _ zoom: CGFloat, withCompletion completion: @escaping (FlutterError?) -> Void - ) { - setZoomLevelStub?(zoom, completion) - } + func captureOutput( + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, + from connection: AVCaptureConnection + ) {} - override func setUpCaptureSessionForAudioIfNeeded() { - setUpCaptureSessionForAudioIfNeededStub?() + func close() {} + + func copyPixelBuffer() -> Unmanaged? { + return nil } } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift index e7986fcce729..cad8337bd86a 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift @@ -14,7 +14,7 @@ import XCTest /// Includes test cases related to photo capture operations for FLTCam class. final class PhotoCaptureTests: XCTestCase { - private func createCam(with captureSessionQueue: DispatchQueue) -> FLTCam { + private func createCam(with captureSessionQueue: DispatchQueue) -> FLTDefaultCam { let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue return CameraTestUtils.createTestCamera(configuration) @@ -24,15 +24,16 @@ final class PhotoCaptureTests: XCTestCase { let errorExpectation = expectation( description: "Must send error to result if save photo delegate completes with error.") let captureSessionQueue = DispatchQueue(label: "capture_session_queue") - FLTDispatchQueueSetSpecific(captureSessionQueue, FLTCaptureSessionQueueSpecific) + captureSessionQueue.setSpecific( + key: fltCaptureSessionQueueSpecificKey, + value: fltCaptureSessionQueueSpecificValue) let cam = createCam(with: captureSessionQueue) let error = NSError(domain: "test", code: 0, userInfo: nil) let mockOutput = MockCapturePhotoOutput() mockOutput.capturePhotoWithSettingsStub = { settings, photoDelegate in let delegate = - cam.inProgressSavePhotoDelegates.object(forKey: settings.uniqueID) - as? FLTSavePhotoDelegate + cam.inProgressSavePhotoDelegates[settings.uniqueID] // Completion runs on IO queue. 
let ioQueue = DispatchQueue(label: "io_queue") ioQueue.async { @@ -57,15 +58,17 @@ final class PhotoCaptureTests: XCTestCase { let pathExpectation = expectation( description: "Must send file path to result if save photo delegate completes with file path.") let captureSessionQueue = DispatchQueue(label: "capture_session_queue") - FLTDispatchQueueSetSpecific(captureSessionQueue, FLTCaptureSessionQueueSpecific) + captureSessionQueue.setSpecific( + key: fltCaptureSessionQueueSpecificKey, + value: fltCaptureSessionQueueSpecificValue) let cam = createCam(with: captureSessionQueue) let filePath = "test" let mockOutput = MockCapturePhotoOutput() mockOutput.capturePhotoWithSettingsStub = { settings, photoDelegate in let delegate = - cam.inProgressSavePhotoDelegates.object(forKey: settings.uniqueID) - as? FLTSavePhotoDelegate + cam.inProgressSavePhotoDelegates[settings.uniqueID] + // Completion runs on IO queue. let ioQueue = DispatchQueue(label: "io_queue") ioQueue.async { @@ -90,7 +93,9 @@ final class PhotoCaptureTests: XCTestCase { description: "Test must set extension to heif if availablePhotoCodecTypes contains HEVC.") let captureSessionQueue = DispatchQueue(label: "capture_session_queue") - FLTDispatchQueueSetSpecific(captureSessionQueue, FLTCaptureSessionQueueSpecific) + captureSessionQueue.setSpecific( + key: fltCaptureSessionQueueSpecificKey, + value: fltCaptureSessionQueueSpecificValue) let cam = createCam(with: captureSessionQueue) cam.setImageFileFormat(FCPPlatformImageFileFormat.heif) @@ -98,8 +103,7 @@ final class PhotoCaptureTests: XCTestCase { mockOutput.availablePhotoCodecTypes = [AVVideoCodecType.hevc] mockOutput.capturePhotoWithSettingsStub = { settings, photoDelegate in let delegate = - cam.inProgressSavePhotoDelegates.object(forKey: settings.uniqueID) - as? FLTSavePhotoDelegate + cam.inProgressSavePhotoDelegates[settings.uniqueID] // Completion runs on IO queue. let ioQueue = DispatchQueue(label: "io_queue") ioQueue.async { @@ -125,15 +129,16 @@ final class PhotoCaptureTests: XCTestCase { "Test must set extension to jpg if availablePhotoCodecTypes does not contain HEVC.") let captureSessionQueue = DispatchQueue(label: "capture_session_queue") - FLTDispatchQueueSetSpecific(captureSessionQueue, FLTCaptureSessionQueueSpecific) + captureSessionQueue.setSpecific( + key: fltCaptureSessionQueueSpecificKey, + value: fltCaptureSessionQueueSpecificValue) let cam = createCam(with: captureSessionQueue) cam.setImageFileFormat(FCPPlatformImageFileFormat.heif) let mockOutput = MockCapturePhotoOutput() mockOutput.capturePhotoWithSettingsStub = { settings, photoDelegate in let delegate = - cam.inProgressSavePhotoDelegates.object(forKey: settings.uniqueID) - as? FLTSavePhotoDelegate + cam.inProgressSavePhotoDelegates[settings.uniqueID] // Completion runs on IO queue. 
let ioQueue = DispatchQueue(label: "io_queue") ioQueue.async { @@ -170,7 +175,9 @@ final class PhotoCaptureTests: XCTestCase { } let captureSessionQueue = DispatchQueue(label: "capture_session_queue") - FLTDispatchQueueSetSpecific(captureSessionQueue, FLTCaptureSessionQueueSpecific) + captureSessionQueue.setSpecific( + key: fltCaptureSessionQueueSpecificKey, + value: fltCaptureSessionQueueSpecificValue) let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue configuration.captureDeviceFactory = { _ in captureDeviceMock } @@ -180,8 +187,7 @@ final class PhotoCaptureTests: XCTestCase { let mockOutput = MockCapturePhotoOutput() mockOutput.capturePhotoWithSettingsStub = { settings, photoDelegate in let delegate = - cam.inProgressSavePhotoDelegates.object(forKey: settings.uniqueID) - as? FLTSavePhotoDelegate + cam.inProgressSavePhotoDelegates[settings.uniqueID] // Completion runs on IO queue. let ioQueue = DispatchQueue(label: "io_queue") ioQueue.async { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift index 5b412d073955..50c5d7ef96c7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift @@ -69,11 +69,10 @@ private class FakeMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { /// Includes test cases related to sample buffer handling for FLTCam class. final class CameraSampleBufferTests: XCTestCase { private func createCamera() -> ( - FLTCam, + FLTDefaultCam, MockAssetWriter, MockAssetWriterInputPixelBufferAdaptor, - MockAssetWriterInput, - MockCaptureConnection + MockAssetWriterInput ) { let assetWriter = MockAssetWriter() let adaptor = MockAssetWriterInputPixelBufferAdaptor() @@ -96,8 +95,7 @@ final class CameraSampleBufferTests: XCTestCase { } return ( - FLTCam(configuration: configuration, error: nil), assetWriter, adaptor, input, - MockCaptureConnection() + CameraTestUtils.createTestCamera(configuration), assetWriter, adaptor, input ) } @@ -110,13 +108,12 @@ final class CameraSampleBufferTests: XCTestCase { } func testCopyPixelBuffer() { - let (camera, _, _, _, connectionMock) = createCamera() + let (camera, _, _, _) = createCamera() let capturedSampleBuffer = CameraTestUtils.createTestSampleBuffer() let capturedPixelBuffer = CMSampleBufferGetImageBuffer(capturedSampleBuffer)! // Mimic sample buffer callback when captured a new video sample. 
camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: capturedSampleBuffer, - from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: capturedSampleBuffer) let deliveredPixelBuffer = camera.copyPixelBuffer()?.takeRetainedValue() XCTAssertEqual( deliveredPixelBuffer, capturedPixelBuffer, @@ -124,7 +121,7 @@ final class CameraSampleBufferTests: XCTestCase { } func testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPauseResumeRecording() { - let (camera, _, _, _, connectionMock) = createCamera() + let (camera, _, _, _) = createCamera() let sampleBuffer = CameraTestUtils.createTestSampleBuffer() let initialRetainCount = CFGetRetainCount(sampleBuffer) @@ -135,7 +132,7 @@ final class CameraSampleBufferTests: XCTestCase { camera.resumeVideoRecording() camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: sampleBuffer, from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: sampleBuffer) let finalRetainCount = CFGetRetainCount(sampleBuffer) XCTAssertEqual( @@ -145,7 +142,7 @@ final class CameraSampleBufferTests: XCTestCase { } func testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples() { - let (camera, writerMock, adaptorMock, inputMock, connectionMock) = createCamera() + let (camera, writerMock, adaptorMock, inputMock) = createCamera() var status = AVAssetWriter.Status.unknown writerMock.startWritingStub = { status = .writing @@ -170,18 +167,18 @@ final class CameraSampleBufferTests: XCTestCase { } camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) + camera.captureOutput(nil, didOutput: audioSample) + camera.captureOutput(nil, didOutput: audioSample) camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: videoSample) + camera.captureOutput(nil, didOutput: audioSample) let expectedSamples = ["video", "audio"] XCTAssertEqual(writtenSamples, expectedSamples, "First appended sample must be video.") } func testDidOutputSampleBufferSampleTimesMustBeNumericAfterPauseResume() { - let (camera, writerMock, adaptorMock, inputMock, connectionMock) = createCamera() + let (camera, writerMock, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() let audioSample = CameraTestUtils.createTestAudioSampleBuffer() @@ -215,17 +212,17 @@ final class CameraSampleBufferTests: XCTestCase { camera.pauseVideoRecording() camera.resumeVideoRecording() camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: videoSample) + camera.captureOutput(nil, didOutput: audioSample) camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: videoSample) + camera.captureOutput(nil, didOutput: audioSample) XCTAssert(videoAppended && audioAppended, "Video or audio was not appended.") } func 
testDidOutputSampleBufferMustNotAppendSampleWhenReadyForMoreMediaDataIsFalse() { - let (camera, _, adaptorMock, inputMock, connectionMock) = createCamera() + let (camera, _, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() @@ -240,18 +237,18 @@ final class CameraSampleBufferTests: XCTestCase { inputMock.readyForMoreMediaData = true sampleAppended = false camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: videoSample) XCTAssertTrue(sampleAppended, "Sample was not appended.") inputMock.readyForMoreMediaData = false sampleAppended = false camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: videoSample) XCTAssertFalse(sampleAppended, "Sample cannot be appended when readyForMoreMediaData is NO.") } func testStopVideoRecordingWithCompletionMustCallCompletion() { - let (camera, writerMock, _, _, _) = createCamera() + let (camera, writerMock, _, _) = createCamera() var status = AVAssetWriter.Status.unknown writerMock.startWritingStub = { @@ -279,7 +276,7 @@ final class CameraSampleBufferTests: XCTestCase { } func testStartWritingShouldNotBeCalledBetweenSampleCreationAndAppending() { - let (camera, writerMock, adaptorMock, inputMock, connectionMock) = createCamera() + let (camera, writerMock, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() @@ -302,13 +299,13 @@ final class CameraSampleBufferTests: XCTestCase { let startWritingCalledBefore = startWritingCalled camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: videoSample) XCTAssert( (startWritingCalledBefore && videoAppended) || (startWritingCalled && !videoAppended), "The startWriting was called between sample creation and appending.") camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) + camera.captureVideoOutput.avOutput, didOutput: videoSample) XCTAssert(videoAppended, "Video was not appended.") } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift index 3909b5be9c9a..0538e2a40cc6 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift @@ -29,10 +29,13 @@ private class MockImageStreamHandler: FLTImageStreamHandler { } } + init() { + super.init(captureSessionQueue: DispatchQueue(label: "capture_session_queue")) + } } final class StreamingTests: XCTestCase { - private func createCamera() -> (FLTCam, CMSampleBuffer) { + private func createCamera() -> (FLTDefaultCam, CMSampleBuffer) { let captureSessionQueue = DispatchQueue(label: "testing") let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue @@ -59,7 +62,7 @@ final class StreamingTests: XCTestCase { streamingExpectation.expectedFulfillmentCount = 4 for _ in 0..<10 { - camera.captureOutput(nil, didOutputSampleBuffer: sampleBuffer, from: nil) + camera.captureOutput(nil, didOutput: sampleBuffer) } waitForExpectations(timeout: 30, handler: nil) @@ -81,11 +84,11 @@ final class 
StreamingTests: XCTestCase { streamingExpectation.expectedFulfillmentCount = 5 for _ in 0..<10 { - camera.captureOutput(nil, didOutputSampleBuffer: sampleBuffer, from: nil) + camera.captureOutput(nil, didOutput: sampleBuffer) } camera.receivedImageStreamData() - camera.captureOutput(nil, didOutputSampleBuffer: sampleBuffer, from: nil) + camera.captureOutput(nil, didOutput: sampleBuffer) waitForExpectations(timeout: 30, handler: nil) } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index a058afb50e4a..f736b2c512b1 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -69,7 +69,9 @@ public final class CameraPlugin: NSObject, FlutterPlugin { super.init() - FLTDispatchQueueSetSpecific(captureSessionQueue, FLTCaptureSessionQueueSpecific) + captureSessionQueue.setSpecific( + key: fltCaptureSessionQueueSpecificKey, + value: fltCaptureSessionQueueSpecificValue) UIDevice.current.beginGeneratingDeviceOrientationNotifications() NotificationCenter.default.addObserver( @@ -238,18 +240,18 @@ extension CameraPlugin: FCPCameraApi { mediaSettings: settings, mediaSettingsWrapper: mediaSettingsAVWrapper, captureDeviceFactory: captureDeviceFactory, + audioCaptureDeviceFactory: { + FLTDefaultCaptureDevice(device: AVCaptureDevice.default(for: .audio)!) + }, captureSessionFactory: captureSessionFactory, captureSessionQueue: captureSessionQueue, captureDeviceInputFactory: captureDeviceInputFactory, initialCameraName: name ) - var error: NSError? - let newCamera = FLTCam(configuration: camConfiguration, error: &error) + do { + let newCamera = try FLTDefaultCam(configuration: camConfiguration) - if let error = error { - completion(nil, CameraPlugin.flutterErrorFromNSError(error)) - } else { camera?.close() camera = newCamera @@ -257,6 +259,8 @@ extension CameraPlugin: FCPCameraApi { guard let strongSelf = self else { return } completion(NSNumber(value: strongSelf.registry.register(newCamera)), nil) } + } catch let error as NSError { + completion(nil, CameraPlugin.flutterErrorFromNSError(error)) } } @@ -342,7 +346,7 @@ extension CameraPlugin: FCPCameraApi { completion: @escaping (FlutterError?) -> Void ) { captureSessionQueue.async { [weak self] in - self?.camera?.lockCapture(orientation) + self?.camera?.lockCaptureOrientation(orientation) completion(nil) } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift new file mode 100644 index 000000000000..a5492be99754 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift @@ -0,0 +1,1547 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import AVFoundation +import CoreMotion +import Flutter + +// Import Objective-C part of the implementation when SwiftPM is used. +#if canImport(camera_avfoundation_objc) + import camera_avfoundation_objc +#endif + +/// Manages the camera's state and performs camera operations.
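+/// Implemented by `FLTDefaultCam` below; tests supply their own conformances (such as `MockFLTCam`) rather than subclassing.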
+protocol FLTCam: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, + AVCaptureAudioDataOutputSampleBufferDelegate +{ + /// The API instance used to communicate with the Dart side of the plugin. + /// Once initially set, this should only ever be accessed on the main thread. + var dartAPI: FCPCameraEventApi? { get set } + + var onFrameAvailable: (() -> Void)? { get set } + + /// Format used for video and image streaming. + var videoFormat: FourCharCode { get set } + + var isPreviewPaused: Bool { get } + + var minimumAvailableZoomFactor: CGFloat { get } + var maximumAvailableZoomFactor: CGFloat { get } + var minimumExposureOffset: CGFloat { get } + var maximumExposureOffset: CGFloat { get } + + func setUpCaptureSessionForAudioIfNeeded() + + func reportInitializationState() + + /// Acknowledges the receipt of one image stream frame. + func receivedImageStreamData() + + func start() + func stop() + + /// Starts recording a video with an optional streaming messenger. + func startVideoRecording( + completion: @escaping (_ error: FlutterError?) -> Void, + messengerForStreaming: FlutterBinaryMessenger? + ) + func pauseVideoRecording() + func resumeVideoRecording() + func stopVideoRecording(completion: @escaping (_ path: String?, _ error: FlutterError?) -> Void) + + func captureToFile(completion: @escaping (_ path: String?, _ error: FlutterError?) -> Void) + + func setDeviceOrientation(_ orientation: UIDeviceOrientation) + func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) + func unlockCaptureOrientation() + + func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) + + func setExposureMode(_ mode: FCPPlatformExposureMode) + func setExposureOffset(_ offset: Double) + func setExposurePoint( + _ point: FCPPlatformPoint?, + withCompletion: @escaping (_ error: FlutterError?) -> Void + ) + + func setFocusMode(_ mode: FCPPlatformFocusMode) + func setFocusPoint( + _ point: FCPPlatformPoint?, + completion: @escaping (_ error: FlutterError?) -> Void + ) + + func setZoomLevel(_ zoom: CGFloat, withCompletion: @escaping (_ error: FlutterError?) -> Void) + + func setFlashMode( + _ mode: FCPPlatformFlashMode, + withCompletion: @escaping (_ error: FlutterError?) -> Void + ) + + func pausePreview() + func resumePreview() + + func setDescriptionWhileRecording( + _ cameraName: String, + withCompletion: @escaping (_ error: FlutterError?) -> Void + ) + + func startImageStream(with: FlutterBinaryMessenger) + func stopImageStream() + + // Override to make `AVCaptureVideoDataOutputSampleBufferDelegate`/ + // `AVCaptureAudioDataOutputSampleBufferDelegate` method non optional + override func captureOutput( + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, + from connection: AVCaptureConnection + ) + + func close() +} + +class FLTDefaultCam: NSObject, FLTCam { + var dartAPI: FCPCameraEventApi? + var onFrameAvailable: (() -> Void)? 
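+ + // The didSet observer on videoFormat below keeps the video output's pixel format in sync + // whenever the streaming format is changed from the Dart side.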
+ + var videoFormat: FourCharCode = kCVPixelFormatType_32BGRA { + didSet { + captureVideoOutput.videoSettings = [ + kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: videoFormat) + ] + } + } + + private(set) var isPreviewPaused = false + + var minimumExposureOffset: CGFloat { CGFloat(captureDevice.minExposureTargetBias) } + var maximumExposureOffset: CGFloat { CGFloat(captureDevice.maxExposureTargetBias) } + var minimumAvailableZoomFactor: CGFloat { captureDevice.minAvailableVideoZoomFactor } + var maximumAvailableZoomFactor: CGFloat { captureDevice.maxAvailableVideoZoomFactor } + + /// The queue on which `latestPixelBuffer` property is accessed. + /// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`. + private let pixelBufferSynchronizationQueue = DispatchQueue( + label: "io.flutter.camera.pixelBufferSynchronizationQueue") + + /// The queue on which captured photos (not videos) are written to disk. + /// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation. + private let photoIOQueue = DispatchQueue(label: "io.flutter.camera.photoIOQueue") + + /// All FLTCam's state access and capture session related operations should be run on this queue. + private let captureSessionQueue: DispatchQueue + + private let mediaSettings: FCPPlatformMediaSettings + private let mediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper + + private let videoCaptureSession: FLTCaptureSession + private let audioCaptureSession: FLTCaptureSession + + /// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests. + private let captureDeviceFactory: CaptureDeviceFactory + private let audioCaptureDeviceFactory: AudioCaptureDeviceFactory + private let captureDeviceInputFactory: FLTCaptureDeviceInputFactory + private let assetWriterFactory: AssetWriterFactory + private let inputPixelBufferAdaptorFactory: InputPixelBufferAdaptorFactory + + /// A wrapper for CMVideoFormatDescriptionGetDimensions. + /// Allows for alternate implementations in tests. + private let videoDimensionsForFormat: VideoDimensionsForFormat + + private let deviceOrientationProvider: FLTDeviceOrientationProviding + private let motionManager = CMMotionManager() + + private(set) var captureDevice: FLTCaptureDevice + // Setter exposed for tests. + var captureVideoOutput: FLTCaptureVideoDataOutput + // Setter exposed for tests. + var capturePhotoOutput: FLTCapturePhotoOutput + private var captureVideoInput: FLTCaptureInput + + private var videoWriter: FLTAssetWriter? + private var videoWriterInput: FLTAssetWriterInput? + private var audioWriterInput: FLTAssetWriterInput? + private var assetWriterPixelBufferAdaptor: FLTAssetWriterInputPixelBufferAdaptor? + private var videoAdaptor: FLTAssetWriterInputPixelBufferAdaptor? + + /// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the + /// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the + /// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo + /// capture operations may overlap, so FLTCam has to keep track of multiple delegates in progress, + /// instead of just a single delegate reference. + private(set) var inProgressSavePhotoDelegates = [Int64: FLTSavePhotoDelegate]() + + private var imageStreamHandler: FLTImageStreamHandler? + + private var textureId: Int64? + private var previewSize: CGSize? 
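+ // previewSize above is derived in setCaptureSessionPreset from the dimensions of the + // selected capture format.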
+ private var deviceOrientation = UIDeviceOrientation.unknown + + /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. + /// Used to deliver the latest pixel buffer to the Flutter engine via the `copyPixelBuffer` API. + private var latestPixelBuffer: CVPixelBuffer? + + private var videoRecordingPath: String? + private var isRecording = false + private var isRecordingPaused = false + private var isFirstVideoSample = false + private var videoIsDisconnected = false + private var audioIsDisconnected = false + private var isAudioSetup = false + private var lastVideoSampleTime = CMTime.zero + private var lastAudioSampleTime = CMTime.zero + private var videoTimeOffset = CMTime.zero + private var audioTimeOffset = CMTime.zero + + /// True when images from the camera are being streamed. + private(set) var isStreamingImages = false + + /// Number of frames currently pending processing. + private var streamingPendingFramesCount = 0 + + /// Maximum number of frames pending processing. + /// To limit memory consumption, limit the number of frames pending processing. + /// After some testing, 4 was determined to be the best maximum value. + /// https://github.com/flutter/plugins/pull/4520#discussion_r766335637 + private var maxStreamingPendingFramesCount = 4 + + private var fileFormat = FCPPlatformImageFileFormat.jpeg + private var lockedCaptureOrientation = UIDeviceOrientation.unknown + private var exposureMode = FCPPlatformExposureMode.auto + private var focusMode = FCPPlatformFocusMode.auto + private var flashMode: FCPPlatformFlashMode + + private static func flutterErrorFromNSError(_ error: NSError) -> FlutterError { + return FlutterError( + code: "Error \(error.code)", + message: error.localizedDescription, + details: error.domain) + } + + // Returns the supported frame rate closest to targetFrameRate for the given format. + private static func bestFrameRate(for format: FLTCaptureDeviceFormat, targetFrameRate: Double) + -> Double + { + var bestFrameRate = 0.0 + var minDistance = Double.greatestFiniteMagnitude + for range in format.videoSupportedFrameRateRanges { + let frameRate = min( + max(targetFrameRate, Double(range.minFrameRate)), Double(range.maxFrameRate)) + let distance = abs(frameRate - targetFrameRate) + if distance < minDistance { + bestFrameRate = frameRate + minDistance = distance + } + } + return bestFrameRate + } + + // Finds the format with the same resolution as the current activeFormat in captureDevice for + // which bestFrameRate returned the frame rate closest to mediaSettings.framesPerSecond. + // Formats with the same subtype as the current activeFormat are preferred. Sets this format + // as activeFormat and also updates mediaSettings.framesPerSecond to the value that + // bestFrameRate returned for that format.
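+ // For example, with a 30 fps target and two same-resolution formats whose supported ranges + // are [24-30] fps and [60-120] fps, bestFrameRate yields 30 (distance 0) and 60 (distance 30) + // respectively, so the first format wins; exact ties prefer the format whose media subtype + // matches the active format's.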
+ private static func selectBestFormatForRequestedFrameRate( + captureDevice: FLTCaptureDevice, + mediaSettings: FCPPlatformMediaSettings, + targetFrameRate: Double, + videoDimensionsForFormat: (FLTCaptureDeviceFormat) -> CMVideoDimensions + ) { + let targetResolution = videoDimensionsForFormat(captureDevice.activeFormat) + let preferredSubType = CMFormatDescriptionGetMediaSubType( + captureDevice.activeFormat.formatDescription) + var bestFormat = captureDevice.activeFormat + var _bestFrameRate = bestFrameRate(for: bestFormat, targetFrameRate: targetFrameRate) + var minDistance = abs(_bestFrameRate - targetFrameRate) + var isBestSubTypePreferred = true + + for format in captureDevice.formats { + let resolution = videoDimensionsForFormat(format) + if resolution.width != targetResolution.width || resolution.height != targetResolution.height + { + continue + } + let frameRate = bestFrameRate(for: format, targetFrameRate: targetFrameRate) + let distance = abs(frameRate - targetFrameRate) + let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription) + let isSubTypePreferred = subType == preferredSubType + if distance < minDistance + || (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred) + { + bestFormat = format + _bestFrameRate = frameRate + minDistance = distance + isBestSubTypePreferred = isSubTypePreferred + } + } + captureDevice.activeFormat = bestFormat + mediaSettings.framesPerSecond = NSNumber(value: _bestFrameRate) + } + + private static func createConnection( + captureDevice: FLTCaptureDevice, + videoFormat: FourCharCode, + captureDeviceInputFactory: FLTCaptureDeviceInputFactory + ) throws -> (FLTCaptureInput, FLTCaptureVideoDataOutput, AVCaptureConnection) { + // Setup video capture input. + let captureVideoInput = try captureDeviceInputFactory.deviceInput(with: captureDevice) + + // Setup video capture output. + let captureVideoOutput = FLTDefaultCaptureVideoDataOutput( + captureVideoOutput: AVCaptureVideoDataOutput()) + captureVideoOutput.videoSettings = [ + kCVPixelBufferPixelFormatTypeKey as String: videoFormat as Any + ] + captureVideoOutput.alwaysDiscardsLateVideoFrames = true + + // Setup video capture connection. + let connection = AVCaptureConnection( + inputPorts: captureVideoInput.ports, + output: captureVideoOutput.avOutput) + + if captureDevice.position == .front { + connection.isVideoMirrored = true + } + + return (captureVideoInput, captureVideoOutput, connection) + } + + init(configuration: FLTCamConfiguration) throws { + captureSessionQueue = configuration.captureSessionQueue + mediaSettings = configuration.mediaSettings + mediaSettingsAVWrapper = configuration.mediaSettingsWrapper + videoCaptureSession = configuration.videoCaptureSession + audioCaptureSession = configuration.audioCaptureSession + captureDeviceFactory = configuration.captureDeviceFactory + audioCaptureDeviceFactory = configuration.audioCaptureDeviceFactory + captureDeviceInputFactory = configuration.captureDeviceInputFactory + assetWriterFactory = configuration.assetWriterFactory + inputPixelBufferAdaptorFactory = configuration.inputPixelBufferAdaptorFactory + videoDimensionsForFormat = configuration.videoDimensionsForFormat + deviceOrientationProvider = configuration.deviceOrientationProvider + + captureDevice = captureDeviceFactory(configuration.initialCameraName) + flashMode = captureDevice.hasFlash ? 
.auto : .off + + capturePhotoOutput = FLTDefaultCapturePhotoOutput(photoOutput: AVCapturePhotoOutput()) + capturePhotoOutput.highResolutionCaptureEnabled = true + + videoCaptureSession.automaticallyConfiguresApplicationAudioSession = false + audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false + + deviceOrientation = configuration.orientation + + let connection: AVCaptureConnection + (captureVideoInput, captureVideoOutput, connection) = try FLTDefaultCam.createConnection( + captureDevice: captureDevice, + videoFormat: videoFormat, + captureDeviceInputFactory: configuration.captureDeviceInputFactory) + + super.init() + + captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) + + videoCaptureSession.addInputWithNoConnections(captureVideoInput) + videoCaptureSession.addOutputWithNoConnections(captureVideoOutput.avOutput) + videoCaptureSession.addConnection(connection) + + videoCaptureSession.addOutput(capturePhotoOutput.avOutput) + + motionManager.startAccelerometerUpdates() + + if let targetFrameRate = mediaSettings.framesPerSecond { + // The frame rate can only be changed on a device that is locked for configuration. + try mediaSettingsAVWrapper.lockDevice(captureDevice) + mediaSettingsAVWrapper.beginConfiguration(for: videoCaptureSession) + + // Possible preset values are hard-coded in the FLT interface and have + // corresponding AVCaptureSessionPreset counterparts. + // If the resolution preset is not supported by the camera, fall back + // to lower-resolution presets. + // If none can be selected, it is an error condition. + do { + try setCaptureSessionPreset(mediaSettings.resolutionPreset) + } catch { + videoCaptureSession.commitConfiguration() + captureDevice.unlockForConfiguration() + throw error + } + + FLTDefaultCam.selectBestFormatForRequestedFrameRate( + captureDevice: captureDevice, + mediaSettings: mediaSettings, + targetFrameRate: targetFrameRate.doubleValue, + videoDimensionsForFormat: videoDimensionsForFormat + ) + + if let framesPerSecond = mediaSettings.framesPerSecond { + // Set frame rate with 1/10 precision allowing non-integral values. + let fpsNominator = floor(framesPerSecond.doubleValue * 10.0) + let duration = CMTimeMake(value: 10, timescale: Int32(fpsNominator)) + + mediaSettingsAVWrapper.setMinFrameDuration(duration, on: captureDevice) + mediaSettingsAVWrapper.setMaxFrameDuration(duration, on: captureDevice) + } + + mediaSettingsAVWrapper.commitConfiguration(for: videoCaptureSession) + mediaSettingsAVWrapper.unlockDevice(captureDevice) + } else { + // If the frame rate is not important, fall back to less restrictive + // behavior (no configuration locking). + try setCaptureSessionPreset(mediaSettings.resolutionPreset) + } + + updateOrientation() + } + + private func setCaptureSessionPreset( + _ resolutionPreset: FCPPlatformResolutionPreset + ) throws { + switch resolutionPreset { + case .max: + if let bestFormat = highestResolutionFormat(forCaptureDevice: captureDevice) { + videoCaptureSession.sessionPreset = .inputPriority + if (try? captureDevice.lockForConfiguration()) != nil { + // Set the best device format found and finish the device configuration.
+ captureDevice.activeFormat = bestFormat + captureDevice.unlockForConfiguration() + break + } + } + fallthrough + case .ultraHigh: + if videoCaptureSession.canSetSessionPreset(.hd4K3840x2160) { + videoCaptureSession.sessionPreset = .hd4K3840x2160 + break + } + if videoCaptureSession.canSetSessionPreset(.high) { + videoCaptureSession.sessionPreset = .high + break + } + fallthrough + case .veryHigh: + if videoCaptureSession.canSetSessionPreset(.hd1920x1080) { + videoCaptureSession.sessionPreset = .hd1920x1080 + break + } + fallthrough + case .high: + if videoCaptureSession.canSetSessionPreset(.hd1280x720) { + videoCaptureSession.sessionPreset = .hd1280x720 + break + } + fallthrough + case .medium: + if videoCaptureSession.canSetSessionPreset(.vga640x480) { + videoCaptureSession.sessionPreset = .vga640x480 + break + } + fallthrough + case .low: + if videoCaptureSession.canSetSessionPreset(.cif352x288) { + videoCaptureSession.sessionPreset = .cif352x288 + break + } + fallthrough + default: + if videoCaptureSession.canSetSessionPreset(.low) { + videoCaptureSession.sessionPreset = .low + } else { + throw NSError( + domain: NSCocoaErrorDomain, + code: URLError.unknown.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "No capture session available for current capture session." + ]) + } + } + + let size = videoDimensionsForFormat(captureDevice.activeFormat) + previewSize = CGSize(width: CGFloat(size.width), height: CGFloat(size.height)) + audioCaptureSession.sessionPreset = videoCaptureSession.sessionPreset + } + + /// Finds the highest available resolution in terms of pixel count for the given device. + /// Preferred are formats with the same subtype as current activeFormat. + private func highestResolutionFormat(forCaptureDevice captureDevice: FLTCaptureDevice) + -> FLTCaptureDeviceFormat? + { + let preferredSubType = CMFormatDescriptionGetMediaSubType( + captureDevice.activeFormat.formatDescription) + var bestFormat: FLTCaptureDeviceFormat? = nil + var maxPixelCount: UInt = 0 + var isBestSubTypePreferred = false + + for format in captureDevice.formats { + let resolution = videoDimensionsForFormat(format) + let height = UInt(resolution.height) + let width = UInt(resolution.width) + let pixelCount = height * width + let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription) + let isSubTypePreferred = subType == preferredSubType + + if pixelCount > maxPixelCount + || (pixelCount == maxPixelCount && isSubTypePreferred && !isBestSubTypePreferred) + { + bestFormat = format + maxPixelCount = pixelCount + isBestSubTypePreferred = isSubTypePreferred + } + } + + return bestFormat + } + + func setUpCaptureSessionForAudioIfNeeded() { + // Don't setup audio twice or we will lose the audio. + guard !mediaSettings.enableAudio || !isAudioSetup else { return } + + let audioDevice = audioCaptureDeviceFactory() + do { + // Create a device input with the device and add it to the session. + // Setup the audio input. + let audioInput = try captureDeviceInputFactory.deviceInput(with: audioDevice) + + // Setup the audio output. + let audioOutput = AVCaptureAudioDataOutput() + + let block = { + // Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other + // plugins like video_player. 
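+ // The helper below only ever widens the shared audio session's category and options, so + // configuration set by other plugins is preserved (see upgradeAudioSessionCategory).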
+ FLTDefaultCam.upgradeAudioSessionCategory( + requestedCategory: .playAndRecord, + options: [.defaultToSpeaker, .allowBluetoothA2DP, .allowAirPlay] + ) + } + + if !Thread.isMainThread { + DispatchQueue.main.sync(execute: block) + } else { + block() + } + + if audioCaptureSession.canAddInput(audioInput) { + audioCaptureSession.addInput(audioInput) + + if audioCaptureSession.canAddOutput(audioOutput) { + audioCaptureSession.addOutput(audioOutput) + audioOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) + isAudioSetup = true + } else { + reportErrorMessage("Unable to add Audio input/output to session capture") + isAudioSetup = false + } + } + } catch let error as NSError { + reportErrorMessage(error.description) + } + } + + // This function, although slightly modified, is also in video_player_avfoundation. + // Both need to do the same thing and run on the same thread (for example main thread). + // Configure application wide audio session manually to prevent overwriting flag + // MixWithOthers by capture session. + // Only change category if it is considered an upgrade which means it can only enable + // ability to play in silent mode or ability to record audio but never disables it, + // that could affect other plugins which depend on this global state. Only change + // category or options if there is change to prevent unnecessary lags and silence. + private static func upgradeAudioSessionCategory( + requestedCategory: AVAudioSession.Category, + options: AVAudioSession.CategoryOptions + ) { + let playCategories: Set = [.playback, .playAndRecord] + let recordCategories: Set = [.record, .playAndRecord] + let requiredCategories: Set = [ + requestedCategory, AVAudioSession.sharedInstance().category, + ] + + let requiresPlay = !requiredCategories.isDisjoint(with: playCategories) + let requiresRecord = !requiredCategories.isDisjoint(with: recordCategories) + + var finalCategory = requestedCategory + if requiresPlay && requiresRecord { + finalCategory = .playAndRecord + } else if requiresPlay { + finalCategory = .playback + } else if requiresRecord { + finalCategory = .record + } + + let finalOptions = AVAudioSession.sharedInstance().categoryOptions.union(options) + + if finalCategory == AVAudioSession.sharedInstance().category + && finalOptions == AVAudioSession.sharedInstance().categoryOptions + { + return + } + + try? AVAudioSession.sharedInstance().setCategory(finalCategory, options: finalOptions) + } + + func reportInitializationState() { + // Get all the state on the current thread, not the main thread. + let state = FCPPlatformCameraState.make( + withPreviewSize: FCPPlatformSize.make( + withWidth: Double(previewSize!.width), + height: Double(previewSize!.height) + ), + exposureMode: exposureMode, + focusMode: focusMode, + exposurePointSupported: captureDevice.isExposurePointOfInterestSupported, + focusPointSupported: captureDevice.isFocusPointOfInterestSupported + ) + + FLTEnsureToRunOnMainQueue { [weak self] in + self?.dartAPI?.initialized(with: state) { _ in + // Ignore any errors, as this is just an event broadcast. + } + } + } + + func receivedImageStreamData() { + streamingPendingFramesCount -= 1 + } + + func start() { + videoCaptureSession.startRunning() + audioCaptureSession.startRunning() + } + + func stop() { + videoCaptureSession.stopRunning() + audioCaptureSession.stopRunning() + } + + func startVideoRecording( + completion: @escaping (FlutterError?) -> Void, + messengerForStreaming messenger: FlutterBinaryMessenger? 
+ ) { + guard !isRecording else { + completion( + FlutterError( + code: "Error", + message: "Video is already recording", + details: nil)) + return + } + + if let messenger = messenger { + startImageStream(with: messenger) + } + + let videoRecordingPath: String + do { + videoRecordingPath = try getTemporaryFilePath( + withExtension: "mp4", + subfolder: "videos", + prefix: "REC_") + self.videoRecordingPath = videoRecordingPath + } catch let error as NSError { + completion(FLTDefaultCam.flutterErrorFromNSError(error)) + return + } + + guard setupWriter(forPath: videoRecordingPath) else { + completion( + FlutterError( + code: "IOError", + message: "Setup Writer Failed", + details: nil)) + return + } + + // startWriting should not be called in didOutputSampleBuffer where it can cause state + // in which _isRecording is YES but _videoWriter.status is AVAssetWriterStatusUnknown + // in stopVideoRecording if it is called after startVideoRecording but before + // didOutputSampleBuffer had chance to call startWriting and lag at start of video + // https://github.com/flutter/flutter/issues/132016 + // https://github.com/flutter/flutter/issues/151319 + videoWriter?.startWriting() + isFirstVideoSample = true + isRecording = true + isRecordingPaused = false + videoTimeOffset = CMTimeMake(value: 0, timescale: 1) + audioTimeOffset = CMTimeMake(value: 0, timescale: 1) + videoIsDisconnected = false + audioIsDisconnected = false + completion(nil) + } + + private func setupWriter(forPath path: String) -> Bool { + setUpCaptureSessionForAudioIfNeeded() + + var error: NSError? + videoWriter = assetWriterFactory(URL(fileURLWithPath: path), AVFileType.mp4, &error) + + guard let videoWriter = videoWriter else { + if let error = error { + reportErrorMessage(error.description) + } + return false + } + + var videoSettings = mediaSettingsAVWrapper.recommendedVideoSettingsForAssetWriter( + withFileType: + AVFileType.mp4, + for: captureVideoOutput + ) + + if mediaSettings.videoBitrate != nil || mediaSettings.framesPerSecond != nil { + var compressionProperties: [String: Any] = [:] + + if let videoBitrate = mediaSettings.videoBitrate { + compressionProperties[AVVideoAverageBitRateKey] = videoBitrate + } + + if let framesPerSecond = mediaSettings.framesPerSecond { + compressionProperties[AVVideoExpectedSourceFrameRateKey] = framesPerSecond + } + + videoSettings?[AVVideoCompressionPropertiesKey] = compressionProperties + } + + let videoWriterInput = mediaSettingsAVWrapper.assetWriterVideoInput( + withOutputSettings: videoSettings) + self.videoWriterInput = videoWriterInput + + let sourcePixelBufferAttributes: [String: Any] = [ + kCVPixelBufferPixelFormatTypeKey as String: videoFormat + ] + + videoAdaptor = inputPixelBufferAdaptorFactory(videoWriterInput, sourcePixelBufferAttributes) + + videoWriterInput.expectsMediaDataInRealTime = true + + // Add the audio input + if mediaSettings.enableAudio { + var acl = AudioChannelLayout() + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono + + let aclSize = MemoryLayout.size(ofValue: acl) + let aclData = Data(bytes: &acl, count: aclSize) + + var audioSettings: [String: Any] = [ + AVFormatIDKey: kAudioFormatMPEG4AAC, + AVSampleRateKey: 44100.0, + AVNumberOfChannelsKey: 1, + AVChannelLayoutKey: aclData, + ] + + if let audioBitrate = mediaSettings.audioBitrate { + audioSettings[AVEncoderBitRateKey] = audioBitrate + } + + let newAudioWriterInput = mediaSettingsAVWrapper.assetWriterAudioInput( + withOutputSettings: audioSettings) + newAudioWriterInput.expectsMediaDataInRealTime = true + 
mediaSettingsAVWrapper.addInput(newAudioWriterInput, to: videoWriter) + self.audioWriterInput = newAudioWriterInput + } + + if flashMode == .torch { + try? captureDevice.lockForConfiguration() + captureDevice.torchMode = .on + captureDevice.unlockForConfiguration() + } + + mediaSettingsAVWrapper.addInput(videoWriterInput, to: videoWriter) + + captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) + + return true + } + + func pauseVideoRecording() { + isRecordingPaused = true + videoIsDisconnected = true + audioIsDisconnected = true + } + + func resumeVideoRecording() { + isRecordingPaused = false + } + + func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { + if isRecording { + isRecording = false + + // When `isRecording` is true, `startWriting` was already called, so `videoWriter.status` + // is always either `.writing` or `.failed`. `finishWriting` does not throw exceptions, + // so there is no need to check `videoWriter.status` beforehand. + videoWriter?.finishWriting { + if self.videoWriter?.status == .completed { + self.updateOrientation() + completion(self.videoRecordingPath, nil) + self.videoRecordingPath = nil + } else { + completion( + nil, + FlutterError( + code: "IOError", + message: "AVAssetWriter could not finish writing!", + details: nil)) + } + } + } else { + let error = NSError( + domain: NSCocoaErrorDomain, + code: URLError.resourceUnavailable.rawValue, + userInfo: [NSLocalizedDescriptionKey: "Video is not recording!"] + ) + completion(nil, FLTDefaultCam.flutterErrorFromNSError(error)) + } + } + + func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { + var settings = AVCapturePhotoSettings() + + if mediaSettings.resolutionPreset == .max { + settings.isHighResolutionPhotoEnabled = true + } + + var fileExtension: String + + let isHEVCCodecAvailable = capturePhotoOutput.availablePhotoCodecTypes.contains( + AVVideoCodecType.hevc) + + if fileFormat == .heif, isHEVCCodecAvailable { + settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc]) + fileExtension = "heif" + } else { + fileExtension = "jpg" + } + + if flashMode != .torch { + settings.flashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(flashMode) + } + + let path: String + do { + path = try getTemporaryFilePath( + withExtension: fileExtension, + subfolder: "pictures", + prefix: "CAP_") + } catch let error as NSError { + completion(nil, FLTDefaultCam.flutterErrorFromNSError(error)) + return + } + + let savePhotoDelegate = FLTSavePhotoDelegate( + path: path, + ioQueue: photoIOQueue, + completionHandler: { [weak self] path, error in + guard let strongSelf = self else { return } + + strongSelf.captureSessionQueue.async { + if let strongSelf = self { + strongSelf.inProgressSavePhotoDelegates.removeValue( + forKey: settings.uniqueID) + } + } + + if let error = error { + completion(nil, FLTDefaultCam.flutterErrorFromNSError(error as NSError)) + } else { + assert(path != nil, "Path must not be nil if no error.") + completion(path, nil) + } + } + ) + + assert( + DispatchQueue.getSpecific(key: fltCaptureSessionQueueSpecificKey) + == fltCaptureSessionQueueSpecificValue, + "save photo delegate references must be updated on the capture session queue") + inProgressSavePhotoDelegates[settings.uniqueID] = savePhotoDelegate + capturePhotoOutput.capturePhoto(with: settings, delegate: savePhotoDelegate) + + } + + private func getTemporaryFilePath(withExtension ext: String, subfolder: String, prefix: String) + throws + -> String + { + let 
docDir = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] + let fileDir = docDir.appendingPathComponent("camera").appendingPathComponent(subfolder) + let fileName = prefix + UUID().uuidString + let file = fileDir.appendingPathComponent(fileName).appendingPathExtension(ext).path + + let fileManager = FileManager.default + if !fileManager.fileExists(atPath: fileDir.path) { + try fileManager.createDirectory( + at: fileDir, + withIntermediateDirectories: true, + attributes: nil) + } + + return file + } + + func setDeviceOrientation(_ orientation: UIDeviceOrientation) { + if deviceOrientation == orientation { + return + } + + deviceOrientation = orientation + updateOrientation() + } + + private func updateOrientation() { + guard !isRecording else { return } + + let orientation: UIDeviceOrientation = + (lockedCaptureOrientation != .unknown) + ? lockedCaptureOrientation + : deviceOrientation + + updateOrientation(orientation, forCaptureOutput: capturePhotoOutput) + updateOrientation(orientation, forCaptureOutput: captureVideoOutput) + } + + private func updateOrientation( + _ orientation: UIDeviceOrientation, forCaptureOutput captureOutput: any FLTCaptureOutput + ) { + if let connection = captureOutput.connection(withMediaType: .video), + connection.isVideoOrientationSupported + { + connection.videoOrientation = getVideoOrientation(forDeviceOrientation: orientation) + } + } + + private func getVideoOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation) + -> AVCaptureVideoOrientation + { + switch deviceOrientation { + case .portrait: + return .portrait + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + case .portraitUpsideDown: + return .portraitUpsideDown + default: + return .portrait + } + } + + func lockCaptureOrientation(_ pigeonOrientation: FCPPlatformDeviceOrientation) { + let orientation = FCPGetUIDeviceOrientationForPigeonDeviceOrientation(pigeonOrientation) + if lockedCaptureOrientation != orientation { + lockedCaptureOrientation = orientation + updateOrientation() + } + } + + func unlockCaptureOrientation() { + lockedCaptureOrientation = .unknown + updateOrientation() + } + + func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { + self.fileFormat = fileFormat + } + + func setExposureMode(_ mode: FCPPlatformExposureMode) { + exposureMode = mode + applyExposureMode() + } + + private func applyExposureMode() { + try? captureDevice.lockForConfiguration() + switch exposureMode { + case .locked: + // AVCaptureExposureMode.autoExpose automatically adjusts the exposure one time, and then locks exposure for the device + captureDevice.setExposureMode(.autoExpose) + case .auto: + if captureDevice.isExposureModeSupported(.continuousAutoExposure) { + captureDevice.setExposureMode(.continuousAutoExposure) + } else { + captureDevice.setExposureMode(.autoExpose) + } + @unknown default: + assertionFailure("Unknown exposure mode") + } + captureDevice.unlockForConfiguration() + } + + func setExposureOffset(_ offset: Double) { + try? captureDevice.lockForConfiguration() + captureDevice.setExposureTargetBias(Float(offset), completionHandler: nil) + captureDevice.unlockForConfiguration() + } + + func setExposurePoint( + _ point: FCPPlatformPoint?, withCompletion completion: @escaping (FlutterError?) 
-> Void + ) { + guard captureDevice.isExposurePointOfInterestSupported else { + completion( + FlutterError( + code: "setExposurePointFailed", + message: "Device does not have exposure point capabilities", + details: nil)) + return + } + + let orientation = UIDevice.current.orientation + try? captureDevice.lockForConfiguration() + // A nil point resets to the center. + let exposurePoint = cgPoint( + for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation) + captureDevice.setExposurePointOfInterest(exposurePoint) + captureDevice.unlockForConfiguration() + // Retrigger auto exposure + applyExposureMode() + completion(nil) + } + + func setFocusMode(_ mode: FCPPlatformFocusMode) { + focusMode = mode + applyFocusMode() + } + + func setFocusPoint(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) { + guard captureDevice.isFocusPointOfInterestSupported else { + completion( + FlutterError( + code: "setFocusPointFailed", + message: "Device does not have focus point capabilities", + details: nil)) + return + } + + let orientation = deviceOrientationProvider.orientation() + try? captureDevice.lockForConfiguration() + // A nil point resets to the center. + captureDevice.setFocusPointOfInterest( + cgPoint(for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation) + ) + captureDevice.unlockForConfiguration() + // Retrigger auto focus + applyFocusMode() + completion(nil) + } + + private func applyFocusMode() { + applyFocusMode(focusMode, onDevice: captureDevice) + } + + private func applyFocusMode( + _ focusMode: FCPPlatformFocusMode, onDevice captureDevice: FLTCaptureDevice + ) { + try? captureDevice.lockForConfiguration() + switch focusMode { + case .locked: + // AVCaptureFocusMode.autoFocus automatically adjusts the focus one time, and then locks focus + if captureDevice.isFocusModeSupported(.autoFocus) { + captureDevice.setFocusMode(.autoFocus) + } + case .auto: + if captureDevice.isFocusModeSupported(.continuousAutoFocus) { + captureDevice.setFocusMode(.continuousAutoFocus) + } else if captureDevice.isFocusModeSupported(.autoFocus) { + captureDevice.setFocusMode(.autoFocus) + } + @unknown default: + assertionFailure("Unknown focus mode") + } + captureDevice.unlockForConfiguration() + } + + private func cgPoint( + for point: FCPPlatformPoint, withOrientation orientation: UIDeviceOrientation + ) + -> CGPoint + { + var x = point.x + var y = point.y + switch orientation { + case .portrait: // 90 ccw + y = 1 - point.x + x = point.y + case .portraitUpsideDown: // 90 cw + x = 1 - point.y + y = point.x + case .landscapeRight: // 180 + x = 1 - point.x + y = 1 - point.y + case .landscapeLeft: + // No rotation required + break + default: + // No rotation required + break + } + return CGPoint(x: x, y: y) + } + + func setZoomLevel(_ zoom: CGFloat, withCompletion completion: @escaping (FlutterError?) 
-> Void) {
+    if zoom < captureDevice.minAvailableVideoZoomFactor
+      || zoom > captureDevice.maxAvailableVideoZoomFactor
+    {
+      completion(
+        FlutterError(
+          code: "ZOOM_ERROR",
+          message:
+            "Zoom level out of bounds (zoom level should be between \(captureDevice.minAvailableVideoZoomFactor) and \(captureDevice.maxAvailableVideoZoomFactor)).",
+          details: nil))
+      return
+    }
+
+    do {
+      try captureDevice.lockForConfiguration()
+    } catch let error as NSError {
+      completion(FLTDefaultCam.flutterErrorFromNSError(error))
+      return
+    }
+
+    captureDevice.videoZoomFactor = zoom
+    captureDevice.unlockForConfiguration()
+    completion(nil)
+  }
+
+  func setFlashMode(
+    _ mode: FCPPlatformFlashMode, withCompletion completion: @escaping (FlutterError?) -> Void
+  ) {
+    if mode == .torch {
+      guard captureDevice.hasTorch else {
+        completion(
+          FlutterError(
+            code: "setFlashModeFailed",
+            message: "Device does not support torch mode",
+            details: nil)
+        )
+        return
+      }
+      guard captureDevice.isTorchAvailable else {
+        completion(
+          FlutterError(
+            code: "setFlashModeFailed",
+            message: "Torch mode is currently not available",
+            details: nil))
+        return
+      }
+      if captureDevice.torchMode != .on {
+        try? captureDevice.lockForConfiguration()
+        captureDevice.torchMode = .on
+        captureDevice.unlockForConfiguration()
+      }
+    } else {
+      guard captureDevice.hasFlash else {
+        completion(
+          FlutterError(
+            code: "setFlashModeFailed",
+            message: "Device does not have flash capabilities",
+            details: nil))
+        return
+      }
+      let avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(mode)
+      guard capturePhotoOutput.supportedFlashModes.contains(NSNumber(value: avFlashMode.rawValue))
+      else {
+        completion(
+          FlutterError(
+            code: "setFlashModeFailed",
+            message: "Device does not support this specific flash mode",
+            details: nil))
+        return
+      }
+      if captureDevice.torchMode != .off {
+        try? captureDevice.lockForConfiguration()
+        captureDevice.torchMode = .off
+        captureDevice.unlockForConfiguration()
+      }
+    }
+    flashMode = mode
+    completion(nil)
+  }
+
+  func pausePreview() {
+    isPreviewPaused = true
+  }
+
+  func resumePreview() {
+    isPreviewPaused = false
+  }
+
+  func setDescriptionWhileRecording(
+    _ cameraName: String, withCompletion completion: @escaping (FlutterError?) -> Void
+  ) {
+    guard isRecording else {
+      completion(
+        FlutterError(
+          code: "setDescriptionWhileRecordingFailed",
+          message: "Device was not recording",
+          details: nil))
+      return
+    }
+
+    captureDevice = captureDeviceFactory(cameraName)
+
+    let oldConnection = captureVideoOutput.connection(withMediaType: .video)
+
+    // Stop video capture from the old output.
+    captureVideoOutput.setSampleBufferDelegate(nil, queue: nil)
+
+    // Remove the old video capture connections.
+    videoCaptureSession.beginConfiguration()
+    videoCaptureSession.removeInput(captureVideoInput)
+    videoCaptureSession.removeOutput(captureVideoOutput.avOutput)
+
+    let newConnection: AVCaptureConnection
+
+    do {
+      (captureVideoInput, captureVideoOutput, newConnection) = try FLTDefaultCam.createConnection(
+        captureDevice: captureDevice,
+        videoFormat: videoFormat,
+        captureDeviceInputFactory: captureDeviceInputFactory)
+
+      captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)
+    } catch {
+      completion(
+        FlutterError(
+          code: "VideoError",
+          message: "Unable to create video connection",
+          details: nil))
+      return
+    }
+
+    // Keep the same orientation the old connections had.
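+    // Without this, a mid-recording camera switch could reset the new connection to its
+    // default orientation (e.g. portrait) for the remainder of the clip.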
+    if let oldConnection = oldConnection, newConnection.isVideoOrientationSupported {
+      newConnection.videoOrientation = oldConnection.videoOrientation
+    }
+
+    // Add the new connections to the session.
+    if !videoCaptureSession.canAddInput(captureVideoInput) {
+      completion(
+        FlutterError(
+          code: "VideoError",
+          message: "Unable to switch video input",
+          details: nil))
+    }
+    videoCaptureSession.addInputWithNoConnections(captureVideoInput)
+
+    if !videoCaptureSession.canAddOutput(captureVideoOutput.avOutput) {
+      completion(
+        FlutterError(
+          code: "VideoError",
+          message: "Unable to switch video output",
+          details: nil))
+    }
+    videoCaptureSession.addOutputWithNoConnections(captureVideoOutput.avOutput)
+
+    if !videoCaptureSession.canAddConnection(newConnection) {
+      completion(
+        FlutterError(
+          code: "VideoError",
+          message: "Unable to switch video connection",
+          details: nil))
+    }
+    videoCaptureSession.addConnection(newConnection)
+    videoCaptureSession.commitConfiguration()
+
+    completion(nil)
+  }
+
+  func startImageStream(with messenger: FlutterBinaryMessenger) {
+    startImageStream(
+      with: messenger,
+      imageStreamHandler: FLTImageStreamHandler(captureSessionQueue: captureSessionQueue)
+    )
+  }
+
+  func startImageStream(
+    with messenger: FlutterBinaryMessenger,
+    imageStreamHandler: FLTImageStreamHandler
+  ) {
+    if isStreamingImages {
+      reportErrorMessage("Images from camera are already streaming!")
+      return
+    }
+
+    let eventChannel = FlutterEventChannel(
+      name: "plugins.flutter.io/camera_avfoundation/imageStream",
+      binaryMessenger: messenger
+    )
+    let threadSafeEventChannel = FLTThreadSafeEventChannel(eventChannel: eventChannel)
+
+    self.imageStreamHandler = imageStreamHandler
+    threadSafeEventChannel.setStreamHandler(imageStreamHandler) { [weak self] in
+      guard let strongSelf = self else { return }
+
+      strongSelf.captureSessionQueue.async { [weak self] in
+        guard let strongSelf = self else { return }
+
+        strongSelf.isStreamingImages = true
+        strongSelf.streamingPendingFramesCount = 0
+      }
+    }
+  }
+
+  func stopImageStream() {
+    if isStreamingImages {
+      isStreamingImages = false
+      imageStreamHandler = nil
+    } else {
+      reportErrorMessage("Images from camera are not streaming!")
+    }
+  }
+
+  func captureOutput(
+    _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer,
+    from connection: AVCaptureConnection
+  ) {
+    captureOutput(output, didOutput: sampleBuffer)
+  }
+
+  func captureOutput(_ output: AVCaptureOutput?, didOutput sampleBuffer: CMSampleBuffer) {
+    if output == captureVideoOutput.avOutput {
+      if let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
+
+        pixelBufferSynchronizationQueue.sync {
+          latestPixelBuffer = newBuffer
+        }
+
+        onFrameAvailable?()
+      }
+    }
+
+    guard CMSampleBufferDataIsReady(sampleBuffer) else {
+      reportErrorMessage("sample buffer is not ready. Skipping sample")
+      return
+    }
+
+    if isStreamingImages {
+      if let eventSink = imageStreamHandler?.eventSink,
+        streamingPendingFramesCount < maxStreamingPendingFramesCount
+      {
+        streamingPendingFramesCount += 1
+
+        let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
+        // Must lock base address before accessing the pixel data
+        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
+
+        let imageWidth = CVPixelBufferGetWidth(pixelBuffer)
+        let imageHeight = CVPixelBufferGetHeight(pixelBuffer)
+
+        var planes: [[String: Any]] = []
+
+        let isPlanar = CVPixelBufferIsPlanar(pixelBuffer)
+        let planeCount = isPlanar ? CVPixelBufferGetPlaneCount(pixelBuffer) : 1
+
+        for i in 0..<planeCount {
+          let planeAddress: UnsafeMutableRawPointer?
+          let bytesPerRow: Int
+          let height: Int
+          let width: Int
+
+          if isPlanar {
+            planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i)
+            bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i)
+            height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i)
+            width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i)
+          } else {
+            planeAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
+            bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
+            height = CVPixelBufferGetHeight(pixelBuffer)
+            width = CVPixelBufferGetWidth(pixelBuffer)
+          }
+
+          let length = bytesPerRow * height
+          let bytes = Data(bytes: planeAddress!, count: length)
+
+          let planeBuffer: [String: Any] = [
+            "bytesPerRow": bytesPerRow,
+            "width": width,
+            "height": height,
+            "bytes": FlutterStandardTypedData(bytes: bytes),
+          ]
+          planes.append(planeBuffer)
+        }
+
+        // Lock the base address before accessing pixel data, and unlock it afterwards.
+        // Done accessing the `pixelBuffer` at this point.
+        CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
+
+        let imageBuffer: [String: Any] = [
+          "width": imageWidth,
+          "height": imageHeight,
+          "format": videoFormat,
+          "planes": planes,
+          "lensAperture": Double(captureDevice.lensAperture()),
+          "sensorExposureTime": Int(CMTimeGetSeconds(captureDevice.exposureDuration()) * 1_000_000_000),
+          "sensorSensitivity": Double(captureDevice.iso()),
+        ]
+
+        DispatchQueue.main.async {
+          eventSink(imageBuffer)
+        }
+      }
+    }
+
+    if isRecording && !isRecordingPaused {
+      if videoWriter?.status == .failed {
+        reportErrorMessage("\(String(describing: videoWriter?.error))")
+        return
+      }
+
+      // ignore audio samples until the first video sample arrives to avoid black frames
+      // https://github.com/flutter/flutter/issues/57831
+      if isFirstVideoSample && output != captureVideoOutput.avOutput {
+        return
+      }
+
+      var currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
+
+      if isFirstVideoSample {
+        videoWriter?.startSession(atSourceTime: currentSampleTime)
+        // fix sample times not being numeric when pause/resume happens before first sample buffer
+        // arrives
+        // https://github.com/flutter/flutter/issues/132014
+        lastVideoSampleTime = currentSampleTime
+        lastAudioSampleTime = currentSampleTime
+        isFirstVideoSample = false
+      }
+
+      if output == captureVideoOutput.avOutput {
+        if videoIsDisconnected {
+          videoIsDisconnected = false
+
+          videoTimeOffset =
+            videoTimeOffset.value == 0
+            ? CMTimeSubtract(currentSampleTime, lastVideoSampleTime)
+            : CMTimeAdd(videoTimeOffset, CMTimeSubtract(currentSampleTime, lastVideoSampleTime))
+
+          return
+        }
+
+        lastVideoSampleTime = currentSampleTime
+
+        let nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
+        let nextSampleTime = CMTimeSubtract(lastVideoSampleTime, videoTimeOffset)
+        // do not append sample buffer when readyForMoreMediaData is NO to avoid crash
+        // https://github.com/flutter/flutter/issues/132073
+        if videoWriterInput?.isReadyForMoreMediaData ?? false {
+          _ = videoAdaptor?.append(nextBuffer!, withPresentationTime: nextSampleTime)
+        }
+      } else {
+        let dur = CMSampleBufferGetDuration(sampleBuffer)
+
+        if dur.value > 0 {
+          currentSampleTime = CMTimeAdd(currentSampleTime, dur)
+        }
+
+        if audioIsDisconnected {
+          audioIsDisconnected = false
+
+          audioTimeOffset =
+            audioTimeOffset.value == 0
+            ? CMTimeSubtract(currentSampleTime, lastAudioSampleTime)
+            : CMTimeAdd(audioTimeOffset, CMTimeSubtract(currentSampleTime, lastAudioSampleTime))
+
+          return
+        }
+
+        lastAudioSampleTime = currentSampleTime
+
+        if audioTimeOffset.value != 0 {
+          if let adjustedSampleBuffer = copySampleBufferWithAdjustedTime(
+            sampleBuffer,
+            by: audioTimeOffset)
+          {
+            newAudioSample(adjustedSampleBuffer)
+          }
+        } else {
+          newAudioSample(sampleBuffer)
+        }
+      }
+    }
+  }
+
+  private func copySampleBufferWithAdjustedTime(_ sample: CMSampleBuffer, by offset: CMTime)
+    -> CMSampleBuffer?
+  {
+    var count: CMItemCount = 0
+    CMSampleBufferGetSampleTimingInfoArray(
+      sample, entryCount: 0, arrayToFill: nil, entriesNeededOut: &count)
+
+    let timingInfo = UnsafeMutablePointer<CMSampleTimingInfo>.allocate(capacity: Int(count))
+    defer { timingInfo.deallocate() }
+
+    CMSampleBufferGetSampleTimingInfoArray(
+      sample, entryCount: count, arrayToFill: timingInfo, entriesNeededOut: &count)
+
+    for i in 0..<count {
+      timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset)
+      timingInfo[i].presentationTimeStamp = CMTimeSubtract(
+        timingInfo[i].presentationTimeStamp, offset)
+    }
+
+    var adjustedSampleBuffer: CMSampleBuffer?
+    CMSampleBufferCreateCopyWithNewTiming(
+      allocator: kCFAllocatorDefault,
+      sampleBuffer: sample,
+      sampleTimingEntryCount: count,
+      sampleTimingArray: timingInfo,
+      sampleBufferOut: &adjustedSampleBuffer)
+
+    return adjustedSampleBuffer
+  }
+
+  private func newVideoSample(_ sampleBuffer: CMSampleBuffer) {
+    guard videoWriter?.status == .writing else {
+      if videoWriter?.status == .failed {
+        reportErrorMessage("\(String(describing: videoWriter?.error))")
+      }
+      return
+    }
+    if videoWriterInput?.isReadyForMoreMediaData ?? false {
+      if !(videoWriterInput?.append(sampleBuffer) ?? false) {
+        reportErrorMessage("Unable to write to video input")
+      }
+    }
+  }
+
+  private func newAudioSample(_ sampleBuffer: CMSampleBuffer) {
+    guard videoWriter?.status == .writing else {
+      if videoWriter?.status == .failed {
+        reportErrorMessage("\(String(describing: videoWriter?.error))")
+      }
+      return
+    }
+    if audioWriterInput?.isReadyForMoreMediaData ?? false {
+      if !(audioWriterInput?.append(sampleBuffer) ?? false) {
+        reportErrorMessage("Unable to write to audio input")
+      }
+    }
+  }
+
+  func close() {
+    stop()
+    for input in videoCaptureSession.inputs {
+      videoCaptureSession.removeInput(FLTDefaultCaptureInput(input: input))
+    }
+    for output in videoCaptureSession.outputs {
+      videoCaptureSession.removeOutput(output)
+    }
+    for input in audioCaptureSession.inputs {
+      audioCaptureSession.removeInput(FLTDefaultCaptureInput(input: input))
+    }
+    for output in audioCaptureSession.outputs {
+      audioCaptureSession.removeOutput(output)
+    }
+  }
+
+  func copyPixelBuffer() -> Unmanaged<CVPixelBuffer>? {
+    var pixelBuffer: CVPixelBuffer?
+    pixelBufferSynchronizationQueue.sync {
+      pixelBuffer = latestPixelBuffer
+      latestPixelBuffer = nil
+    }
+
+    if let buffer = pixelBuffer {
+      return Unmanaged.passRetained(buffer)
+    } else {
+      return nil
+    }
+  }
+
+  private func reportErrorMessage(_ errorMessage: String) {
+    FLTEnsureToRunOnMainQueue { [weak self] in
+      self?.dartAPI?.reportError(errorMessage) { _ in
+        // Ignore any errors, as this is just an event broadcast.
+      }
+    }
+  }
+
+  deinit {
+    motionManager.stopAccelerometerUpdates()
+  }
+}
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift
new file mode 100644
index 000000000000..42b26ba207d3
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift
@@ -0,0 +1,36 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class FLTImageStreamHandler: NSObject, FlutterStreamHandler {
+
+  /// The queue on which `eventSink` property should be accessed.
+  let captureSessionQueue: DispatchQueue
+
+  /// The event sink to stream camera events to Dart.
+  ///
+  /// The property should only be accessed on `captureSessionQueue`.
+  /// The block itself should be invoked on the main queue.
+  private(set) var eventSink: FlutterEventSink?
+
+  init(captureSessionQueue: DispatchQueue) {
+    self.captureSessionQueue = captureSessionQueue
+    super.init()
+  }
+
+  func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink)
+    -> FlutterError?
+  {
+    captureSessionQueue.async { [weak self] in
+      self?.eventSink = events
+    }
+    return nil
+  }
+
+  func onCancel(withArguments arguments: Any?) -> FlutterError? {
+    captureSessionQueue.async { [weak self] in
+      self?.eventSink = nil
+    }
+    return nil
+  }
+}
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift
new file mode 100644
index 000000000000..074153a15085
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift
@@ -0,0 +1,7 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// Queue-specific context data to be associated with the capture session queue.
+let fltCaptureSessionQueueSpecificKey = DispatchSpecificKey<String>()
+let fltCaptureSessionQueueSpecificValue = "capture_session_queue"
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m
deleted file mode 100644
index 974c3a92f782..000000000000
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m
+++ /dev/null
@@ -1,1441 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "./include/camera_avfoundation/FLTCam.h"
-#import "./include/camera_avfoundation/FLTCam_Test.h"
-
-@import CoreMotion;
-@import Flutter;
-#import <libkern/OSAtomic.h>
-
-#import "./include/camera_avfoundation/FLTCaptureConnection.h"
-#import "./include/camera_avfoundation/FLTCaptureDevice.h"
-#import "./include/camera_avfoundation/FLTDeviceOrientationProviding.h"
-#import "./include/camera_avfoundation/FLTEventChannel.h"
-#import "./include/camera_avfoundation/FLTSavePhotoDelegate.h"
-#import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h"
-#import "./include/camera_avfoundation/QueueUtils.h"
-#import "./include/camera_avfoundation/messages.g.h"
-
-static FlutterError *FlutterErrorFromNSError(NSError *error) {
-  return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code]
-                             message:error.localizedDescription
-                             details:error.domain];
-}
-
-@implementation FLTImageStreamHandler
-
-- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
-  self = [super init];
-  NSAssert(self, @"super init cannot be nil");
-  _captureSessionQueue = captureSessionQueue;
-  return self;
-}
-
-- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
-  __weak typeof(self) weakSelf = self;
-  dispatch_async(self.captureSessionQueue, ^{
-    weakSelf.eventSink = nil;
-  });
-  return nil;
-}
-
-- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
-                                       eventSink:(nonnull FlutterEventSink)events {
-  __weak typeof(self) weakSelf = self;
-  dispatch_async(self.captureSessionQueue, ^{
-    weakSelf.eventSink = events;
-  });
-  return nil;
-}
-@end
-
-@interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
-                      AVCaptureAudioDataOutputSampleBufferDelegate>
-
-@property(readonly, nonatomic) int64_t textureId;
-@property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings;
-@property(readonly, nonatomic) FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper;
-@property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
-@property(readonly, nonatomic) NSObject<FLTCaptureSession> *videoCaptureSession;
-@property(readonly, nonatomic) NSObject<FLTCaptureSession> *audioCaptureSession;
-
-@property(readonly, nonatomic) NSObject<FLTCaptureInput> *captureVideoInput;
-/// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback.
-/// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API.
-@property(readwrite, nonatomic) CVPixelBufferRef latestPixelBuffer;
-@property(readonly, nonatomic) CGSize captureSize;
-@property(strong, nonatomic) NSObject<FLTAssetWriter> *videoWriter;
-@property(strong, nonatomic) NSObject<FLTAssetWriterInput> *videoWriterInput;
-@property(strong, nonatomic) NSObject<FLTAssetWriterInput> *audioWriterInput;
-@property(strong, nonatomic)
-    NSObject<FLTAssetWriterInputPixelBufferAdaptor> *assetWriterPixelBufferAdaptor;
-@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
-@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
-@property(strong, nonatomic) NSString *videoRecordingPath;
-@property(assign, nonatomic) BOOL isFirstVideoSample;
-@property(assign, nonatomic) BOOL isRecording;
-@property(assign, nonatomic) BOOL isRecordingPaused;
-@property(assign, nonatomic) BOOL videoIsDisconnected;
-@property(assign, nonatomic) BOOL audioIsDisconnected;
-@property(assign, nonatomic) BOOL isAudioSetup;
-
-/// Number of frames currently pending processing.
-@property(assign, nonatomic) int streamingPendingFramesCount;
-
-/// Maximum number of frames pending processing.
-@property(assign, nonatomic) int maxStreamingPendingFramesCount;
-
-@property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation;
-@property(assign, nonatomic) CMTime lastVideoSampleTime;
-@property(assign, nonatomic) CMTime lastAudioSampleTime;
-@property(assign, nonatomic) CMTime videoTimeOffset;
-@property(assign, nonatomic) CMTime audioTimeOffset;
-@property(nonatomic) CMMotionManager *motionManager;
-@property NSObject<FLTAssetWriterInputPixelBufferAdaptor> *videoAdaptor;
-/// All FLTCam's state access and capture session related operations should be run on this queue.
-@property(strong, nonatomic) dispatch_queue_t captureSessionQueue;
-/// The queue on which `latestPixelBuffer` property is accessed.
-/// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`.
-@property(strong, nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;
-/// The queue on which captured photos (not videos) are written to disk.
-/// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation.
-@property(strong, nonatomic) dispatch_queue_t photoIOQueue;
-@property(assign, nonatomic) UIDeviceOrientation deviceOrientation;
-/// A wrapper for CMVideoFormatDescriptionGetDimensions.
-/// Allows for alternate implementations in tests.
-@property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat;
-/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests.
-@property(nonatomic, copy) CaptureDeviceFactory captureDeviceFactory;
-@property(readonly, nonatomic) NSObject<FLTCaptureDeviceInputFactory> *captureDeviceInputFactory;
-@property(assign, nonatomic) FCPPlatformExposureMode exposureMode;
-@property(assign, nonatomic) FCPPlatformFocusMode focusMode;
-@property(assign, nonatomic) FCPPlatformFlashMode flashMode;
-@property(readonly, nonatomic) NSObject<FLTDeviceOrientationProviding> *deviceOrientationProvider;
-@property(nonatomic, copy) AssetWriterFactory assetWriterFactory;
-@property(nonatomic, copy) InputPixelBufferAdaptorFactory inputPixelBufferAdaptorFactory;
-/// Reports the given error message to the Dart side of the plugin.
-///
-/// Can be called from any thread.
-- (void)reportErrorMessage:(NSString *)errorMessage;
-@end
-
-@implementation FLTCam
-
-NSString *const errorMethod = @"error";
-
-// Returns frame rate supported by format closest to targetFrameRate.
-static double bestFrameRateForFormat(NSObject<FLTCaptureDeviceFormat> *format,
-                                     double targetFrameRate) {
-  double bestFrameRate = 0;
-  double minDistance = DBL_MAX;
-  for (NSObject<FLTFrameRateRange> *range in format.videoSupportedFrameRateRanges) {
-    double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate);
-    double distance = fabs(frameRate - targetFrameRate);
-    if (distance < minDistance) {
-      bestFrameRate = frameRate;
-      minDistance = distance;
-    }
-  }
-  return bestFrameRate;
-}
-
-// Finds format with same resolution as current activeFormat in captureDevice for which
-// bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond.
-// Preferred are formats with the same subtype as current activeFormat. Sets this format
-// as activeFormat and also updates mediaSettings.framesPerSecond to value which
-// bestFrameRateForFormat returned for that format.
-static void selectBestFormatForRequestedFrameRate(
-    NSObject<FLTCaptureDevice> *captureDevice, FCPPlatformMediaSettings *mediaSettings,
-    VideoDimensionsForFormat videoDimensionsForFormat) {
-  CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat);
-  double targetFrameRate = mediaSettings.framesPerSecond.doubleValue;
-  FourCharCode preferredSubType =
-      CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription);
-  NSObject<FLTCaptureDeviceFormat> *bestFormat = captureDevice.activeFormat;
-  double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate);
-  double minDistance = fabs(bestFrameRate - targetFrameRate);
-  BOOL isBestSubTypePreferred = YES;
-  for (NSObject<FLTCaptureDeviceFormat> *format in captureDevice.formats) {
-    CMVideoDimensions resolution = videoDimensionsForFormat(format);
-    if (resolution.width != targetResolution.width ||
-        resolution.height != targetResolution.height) {
-      continue;
-    }
-    double frameRate = bestFrameRateForFormat(format, targetFrameRate);
-    double distance = fabs(frameRate - targetFrameRate);
-    FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
-    BOOL isSubTypePreferred = subType == preferredSubType;
-    if (distance < minDistance ||
-        (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred)) {
-      bestFormat = format;
-      bestFrameRate = frameRate;
-      minDistance = distance;
-      isBestSubTypePreferred = isSubTypePreferred;
-    }
-  }
-  captureDevice.activeFormat = bestFormat;
-  mediaSettings.framesPerSecond = @(bestFrameRate);
-}
-
-- (instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configuration
-                                error:(NSError **)error {
-  self = [super init];
-  NSAssert(self, @"super init cannot be nil");
-  _mediaSettings = configuration.mediaSettings;
-  _mediaSettingsAVWrapper = configuration.mediaSettingsWrapper;
-
-  _captureSessionQueue = configuration.captureSessionQueue;
-  _pixelBufferSynchronizationQueue =
-      dispatch_queue_create("io.flutter.camera.pixelBufferSynchronizationQueue", NULL);
-  _photoIOQueue = dispatch_queue_create("io.flutter.camera.photoIOQueue", NULL);
-  _videoCaptureSession = configuration.videoCaptureSession;
-  _audioCaptureSession = configuration.audioCaptureSession;
-  _captureDeviceFactory = configuration.captureDeviceFactory;
-  _captureDevice = _captureDeviceFactory(configuration.initialCameraName);
-  _captureDeviceInputFactory = configuration.captureDeviceInputFactory;
-  _videoDimensionsForFormat =
configuration.videoDimensionsForFormat; - _flashMode = _captureDevice.hasFlash ? FCPPlatformFlashModeAuto : FCPPlatformFlashModeOff; - _exposureMode = FCPPlatformExposureModeAuto; - _focusMode = FCPPlatformFocusModeAuto; - _lockedCaptureOrientation = UIDeviceOrientationUnknown; - _deviceOrientation = configuration.orientation; - _videoFormat = kCVPixelFormatType_32BGRA; - _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary]; - _fileFormat = FCPPlatformImageFileFormatJpeg; - _videoCaptureSession.automaticallyConfiguresApplicationAudioSession = NO; - _audioCaptureSession.automaticallyConfiguresApplicationAudioSession = NO; - _assetWriterFactory = configuration.assetWriterFactory; - _inputPixelBufferAdaptorFactory = configuration.inputPixelBufferAdaptorFactory; - - // To limit memory consumption, limit the number of frames pending processing. - // After some testing, 4 was determined to be the best maximum value. - // https://github.com/flutter/plugins/pull/4520#discussion_r766335637 - _maxStreamingPendingFramesCount = 4; - - NSError *localError = nil; - AVCaptureConnection *connection = [self createConnection:&localError]; - if (localError) { - if (error != nil) { - *error = localError; - } - return nil; - } - - [_videoCaptureSession addInputWithNoConnections:_captureVideoInput]; - [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput.avOutput]; - [_videoCaptureSession addConnection:connection]; - - _capturePhotoOutput = - [[FLTDefaultCapturePhotoOutput alloc] initWithPhotoOutput:[AVCapturePhotoOutput new]]; - [_capturePhotoOutput setHighResolutionCaptureEnabled:YES]; - [_videoCaptureSession addOutput:_capturePhotoOutput.avOutput]; - - _motionManager = [[CMMotionManager alloc] init]; - [_motionManager startAccelerometerUpdates]; - - _deviceOrientationProvider = configuration.deviceOrientationProvider; - - if (_mediaSettings.framesPerSecond) { - // The frame rate can be changed only on a locked for configuration device. - if ([_mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) { - [_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession]; - - // Possible values for presets are hard-coded in FLT interface having - // corresponding AVCaptureSessionPreset counterparts. - // If _resolutionPreset is not supported by camera there is - // fallback to lower resolution presets. - // If none can be selected there is error condition. - if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) { - [_videoCaptureSession commitConfiguration]; - [_captureDevice unlockForConfiguration]; - return nil; - } - - selectBestFormatForRequestedFrameRate(_captureDevice, _mediaSettings, - _videoDimensionsForFormat); - - // Set frame rate with 1/10 precision allowing not integral values. - int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0); - CMTime duration = CMTimeMake(10, fpsNominator); - - [_mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice]; - [_mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice]; - - [_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession]; - [_mediaSettingsAVWrapper unlockDevice:_captureDevice]; - } else { - return nil; - } - } else { - // If the frame rate is not important fall to a less restrictive - // behavior (no configuration locking). 
- if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) { - return nil; - } - } - - [self updateOrientation]; - - return self; -} - -- (AVCaptureConnection *)createConnection:(NSError **)error { - // Setup video capture input. - _captureVideoInput = [_captureDeviceInputFactory deviceInputWithDevice:_captureDevice - error:error]; - - // Test the return value of the `deviceInputWithDevice` method to see whether an error occurred. - // Don’t just test to see whether the error pointer was set to point to an error. - // See: - // https://developer.apple.com/library/archive/documentation/Cocoa/Conceptual/ProgrammingWithObjectiveC/ErrorHandling/ErrorHandling.html - if (!_captureVideoInput) { - return nil; - } - - // Setup video capture output. - _captureVideoOutput = [[FLTDefaultCaptureVideoDataOutput alloc] - initWithCaptureVideoOutput:[AVCaptureVideoDataOutput new]]; - _captureVideoOutput.videoSettings = - @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)}; - [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES]; - [_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue]; - - // Setup video capture connection. - AVCaptureConnection *connection = - [AVCaptureConnection connectionWithInputPorts:_captureVideoInput.ports - output:_captureVideoOutput.avOutput]; - if ([_captureDevice position] == AVCaptureDevicePositionFront) { - connection.videoMirrored = YES; - } - - return connection; -} - -- (void)reportInitializationState { - // Get all the state on the current thread, not the main thread. - FCPPlatformCameraState *state = [FCPPlatformCameraState - makeWithPreviewSize:[FCPPlatformSize makeWithWidth:self.previewSize.width - height:self.previewSize.height] - exposureMode:self.exposureMode - focusMode:self.focusMode - exposurePointSupported:self.captureDevice.exposurePointOfInterestSupported - focusPointSupported:self.captureDevice.focusPointOfInterestSupported]; - - __weak typeof(self) weakSelf = self; - FLTEnsureToRunOnMainQueue(^{ - [weakSelf.dartAPI initializedWithState:state - completion:^(FlutterError *error){ - // Ignore any errors, as this is just an event broadcast. - }]; - }); -} - -- (void)start { - [_videoCaptureSession startRunning]; - [_audioCaptureSession startRunning]; -} - -- (void)stop { - [_videoCaptureSession stopRunning]; - [_audioCaptureSession stopRunning]; -} - -- (void)setVideoFormat:(OSType)videoFormat { - _videoFormat = videoFormat; - _captureVideoOutput.videoSettings = - @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)}; -} - -- (void)setImageFileFormat:(FCPPlatformImageFileFormat)fileFormat { - _fileFormat = fileFormat; -} - -- (void)setDeviceOrientation:(UIDeviceOrientation)orientation { - if (_deviceOrientation == orientation) { - return; - } - - _deviceOrientation = orientation; - [self updateOrientation]; -} - -- (void)updateOrientation { - if (_isRecording) { - return; - } - - UIDeviceOrientation orientation = (_lockedCaptureOrientation != UIDeviceOrientationUnknown) - ? 
_lockedCaptureOrientation
-                                                  : _deviceOrientation;
-
-  [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput];
-  [self updateOrientation:orientation forCaptureOutput:_captureVideoOutput];
-}
-
-- (void)updateOrientation:(UIDeviceOrientation)orientation
-         forCaptureOutput:(NSObject<FLTCaptureOutput> *)captureOutput {
-  if (!captureOutput) {
-    return;
-  }
-
-  NSObject<FLTCaptureConnection> *connection =
-      [captureOutput connectionWithMediaType:AVMediaTypeVideo];
-  if (connection && connection.isVideoOrientationSupported) {
-    connection.videoOrientation = [self getVideoOrientationForDeviceOrientation:orientation];
-  }
-}
-
-- (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable,
-                                              FlutterError *_Nullable))completion {
-  AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
-
-  if (self.mediaSettings.resolutionPreset == FCPPlatformResolutionPresetMax) {
-    [settings setHighResolutionPhotoEnabled:YES];
-  }
-
-  NSString *extension;
-
-  BOOL isHEVCCodecAvailable =
-      [self.capturePhotoOutput.availablePhotoCodecTypes containsObject:AVVideoCodecTypeHEVC];
-
-  if (_fileFormat == FCPPlatformImageFileFormatHeif && isHEVCCodecAvailable) {
-    settings =
-        [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}];
-    extension = @"heif";
-  } else {
-    extension = @"jpg";
-  }
-
-  // If the flash is in torch mode, no capture-level flash setting is needed.
-  if (self.flashMode != FCPPlatformFlashModeTorch) {
-    [settings setFlashMode:FCPGetAVCaptureFlashModeForPigeonFlashMode(self.flashMode)];
-  }
-  NSError *error;
-  NSString *path = [self getTemporaryFilePathWithExtension:extension
-                                                 subfolder:@"pictures"
-                                                    prefix:@"CAP_"
-                                                     error:error];
-  if (error) {
-    completion(nil, FlutterErrorFromNSError(error));
-    return;
-  }
-
-  __weak typeof(self) weakSelf = self;
-  FLTSavePhotoDelegate *savePhotoDelegate = [[FLTSavePhotoDelegate alloc]
-           initWithPath:path
-                ioQueue:self.photoIOQueue
-      completionHandler:^(NSString *_Nullable path, NSError *_Nullable error) {
-        typeof(self) strongSelf = weakSelf;
-        if (!strongSelf) return;
-        dispatch_async(strongSelf.captureSessionQueue, ^{
-          // cannot use the outer `strongSelf`
-          typeof(self) strongSelf = weakSelf;
-          if (!strongSelf) return;
-          [strongSelf.inProgressSavePhotoDelegates removeObjectForKey:@(settings.uniqueID)];
-        });
-
-        if (error) {
-          completion(nil, FlutterErrorFromNSError(error));
-        } else {
-          NSAssert(path, @"Path must not be nil if no error.");
-          completion(path, nil);
-        }
-      }];
-
-  NSAssert(dispatch_get_specific(FLTCaptureSessionQueueSpecific),
-           @"save photo delegate references must be updated on the capture session queue");
-  self.inProgressSavePhotoDelegates[@(settings.uniqueID)] = savePhotoDelegate;
-  [self.capturePhotoOutput capturePhotoWithSettings:settings delegate:savePhotoDelegate];
-}
-
-- (AVCaptureVideoOrientation)getVideoOrientationForDeviceOrientation:
-    (UIDeviceOrientation)deviceOrientation {
-  if (deviceOrientation == UIDeviceOrientationPortrait) {
-    return AVCaptureVideoOrientationPortrait;
-  } else if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
-    // Note: device orientation is flipped compared to video orientation. When UIDeviceOrientation
-    // is landscape left the video orientation should be landscape right.
-    return AVCaptureVideoOrientationLandscapeRight;
-  } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
-    // Note: device orientation is flipped compared to video orientation. When UIDeviceOrientation
-    // is landscape right the video orientation should be landscape left.
-    return AVCaptureVideoOrientationLandscapeLeft;
-  } else if (deviceOrientation == UIDeviceOrientationPortraitUpsideDown) {
-    return AVCaptureVideoOrientationPortraitUpsideDown;
-  } else {
-    return AVCaptureVideoOrientationPortrait;
-  }
-}
-
-- (NSString *)getTemporaryFilePathWithExtension:(NSString *)extension
-                                      subfolder:(NSString *)subfolder
-                                         prefix:(NSString *)prefix
-                                          error:(NSError *)error {
-  NSString *docDir =
-      NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
-  NSString *fileDir =
-      [[docDir stringByAppendingPathComponent:@"camera"] stringByAppendingPathComponent:subfolder];
-  NSString *fileName = [prefix stringByAppendingString:[[NSUUID UUID] UUIDString]];
-  NSString *file =
-      [[fileDir stringByAppendingPathComponent:fileName] stringByAppendingPathExtension:extension];
-
-  NSFileManager *fm = [NSFileManager defaultManager];
-  if (![fm fileExistsAtPath:fileDir]) {
-    [[NSFileManager defaultManager] createDirectoryAtPath:fileDir
-                               withIntermediateDirectories:true
-                                                attributes:nil
-                                                     error:&error];
-    if (error) {
-      return nil;
-    }
-  }
-
-  return file;
-}
-
-- (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
-                      withError:(NSError **)error {
-  switch (resolutionPreset) {
-    case FCPPlatformResolutionPresetMax: {
-      NSObject<FLTCaptureDeviceFormat> *bestFormat =
-          [self highestResolutionFormatForCaptureDevice:_captureDevice];
-      if (bestFormat) {
-        _videoCaptureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
-        if ([_captureDevice lockForConfiguration:NULL]) {
-          // Set the best device format found and finish the device configuration.
-          _captureDevice.activeFormat = bestFormat;
-          [_captureDevice unlockForConfiguration];
-          break;
-        }
-      }
-    }
-    case FCPPlatformResolutionPresetUltraHigh:
-      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
-        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
-        break;
-      }
-      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
-        _videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;
-        break;
-      }
-    case FCPPlatformResolutionPresetVeryHigh:
-      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
-        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
-        break;
-      }
-    case FCPPlatformResolutionPresetHigh:
-      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
-        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset1280x720;
-        break;
-      }
-    case FCPPlatformResolutionPresetMedium:
-      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
-        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;
-        break;
-      }
-    case FCPPlatformResolutionPresetLow:
-      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
-        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset352x288;
-        break;
-      }
-    default:
-      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
-        _videoCaptureSession.sessionPreset = AVCaptureSessionPresetLow;
-      } else {
-        if (error != nil) {
-          *error =
-              [NSError errorWithDomain:NSCocoaErrorDomain
-                                  code:NSURLErrorUnknown
-                              userInfo:@{
-                                NSLocalizedDescriptionKey :
-                                    @"No capture session available for current capture session."
-                              }];
-        }
-        return NO;
-      }
-  }
-  CMVideoDimensions size = self.videoDimensionsForFormat(_captureDevice.activeFormat);
-  _previewSize = CGSizeMake(size.width, size.height);
-  _audioCaptureSession.sessionPreset = _videoCaptureSession.sessionPreset;
-  return YES;
-}
-
-/// Finds the highest available resolution in terms of pixel count for the given device.
-/// Preferred are formats with the same subtype as current activeFormat.
-- (NSObject<FLTCaptureDeviceFormat> *)highestResolutionFormatForCaptureDevice:
-    (NSObject<FLTCaptureDevice> *)captureDevice {
-  FourCharCode preferredSubType =
-      CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription);
-  NSObject<FLTCaptureDeviceFormat> *bestFormat = nil;
-  NSUInteger maxPixelCount = 0;
-  BOOL isBestSubTypePreferred = NO;
-  for (NSObject<FLTCaptureDeviceFormat> *format in _captureDevice.formats) {
-    CMVideoDimensions res = self.videoDimensionsForFormat(format);
-    NSUInteger height = res.height;
-    NSUInteger width = res.width;
-    NSUInteger pixelCount = height * width;
-    FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
-    BOOL isSubTypePreferred = subType == preferredSubType;
-    if (pixelCount > maxPixelCount ||
-        (pixelCount == maxPixelCount && isSubTypePreferred && !isBestSubTypePreferred)) {
-      bestFormat = format;
-      maxPixelCount = pixelCount;
-      isBestSubTypePreferred = isSubTypePreferred;
-    }
-  }
-  return bestFormat;
-}
-
-- (void)captureOutput:(AVCaptureOutput *)output
-    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-           fromConnection:(NSObject<FLTCaptureConnection> *)connection {
-  if (output == _captureVideoOutput.avOutput) {
-    CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-    CFRetain(newBuffer);
-
-    __block CVPixelBufferRef previousPixelBuffer = nil;
-    // Use `dispatch_sync` to avoid unnecessary context switch under common non-contest scenarios;
-    // Under rare contest scenarios, it will not block for too long since the critical section is
-    // quite lightweight.
-    dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
-      // No need weak self because it's dispatch_sync.
-      previousPixelBuffer = self.latestPixelBuffer;
-      self.latestPixelBuffer = newBuffer;
-    });
-    if (previousPixelBuffer) {
-      CFRelease(previousPixelBuffer);
-    }
-    if (_onFrameAvailable) {
-      _onFrameAvailable();
-    }
-  }
-  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
-    [self reportErrorMessage:@"sample buffer is not ready.
Skipping sample"]; - return; - } - if (_isStreamingImages) { - FlutterEventSink eventSink = _imageStreamHandler.eventSink; - if (eventSink && (self.streamingPendingFramesCount < self.maxStreamingPendingFramesCount)) { - self.streamingPendingFramesCount++; - CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - // Must lock base address before accessing the pixel data - CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - - size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer); - size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer); - - NSMutableArray *planes = [NSMutableArray array]; - - const Boolean isPlanar = CVPixelBufferIsPlanar(pixelBuffer); - size_t planeCount; - if (isPlanar) { - planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); - } else { - planeCount = 1; - } - - for (int i = 0; i < planeCount; i++) { - void *planeAddress; - size_t bytesPerRow; - size_t height; - size_t width; - - if (isPlanar) { - planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i); - bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i); - height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i); - width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i); - } else { - planeAddress = CVPixelBufferGetBaseAddress(pixelBuffer); - bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); - height = CVPixelBufferGetHeight(pixelBuffer); - width = CVPixelBufferGetWidth(pixelBuffer); - } - - NSNumber *length = @(bytesPerRow * height); - NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue]; - - NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary]; - planeBuffer[@"bytesPerRow"] = @(bytesPerRow); - planeBuffer[@"width"] = @(width); - planeBuffer[@"height"] = @(height); - planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes]; - - [planes addObject:planeBuffer]; - } - // Lock the base address before accessing pixel data, and unlock it afterwards. - // Done accessing the `pixelBuffer` at this point. 
- CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - - NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary]; - imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth]; - imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight]; - imageBuffer[@"format"] = @(_videoFormat); - imageBuffer[@"planes"] = planes; - imageBuffer[@"lensAperture"] = [NSNumber numberWithFloat:[_captureDevice lensAperture]]; - Float64 exposureDuration = CMTimeGetSeconds([_captureDevice exposureDuration]); - Float64 nsExposureDuration = 1000000000 * exposureDuration; - imageBuffer[@"sensorExposureTime"] = [NSNumber numberWithInt:nsExposureDuration]; - imageBuffer[@"sensorSensitivity"] = [NSNumber numberWithFloat:[_captureDevice ISO]]; - - dispatch_async(dispatch_get_main_queue(), ^{ - eventSink(imageBuffer); - }); - } - } - if (_isRecording && !_isRecordingPaused) { - if (_videoWriter.status == AVAssetWriterStatusFailed) { - [self reportErrorMessage:[NSString stringWithFormat:@"%@", _videoWriter.error]]; - return; - } - - // ignore audio samples until the first video sample arrives to avoid black frames - // https://github.com/flutter/flutter/issues/57831 - if (_isFirstVideoSample && output != _captureVideoOutput.avOutput) { - return; - } - - CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); - - if (_isFirstVideoSample) { - [_videoWriter startSessionAtSourceTime:currentSampleTime]; - // fix sample times not being numeric when pause/resume happens before first sample buffer - // arrives - // https://github.com/flutter/flutter/issues/132014 - _lastVideoSampleTime = currentSampleTime; - _lastAudioSampleTime = currentSampleTime; - _isFirstVideoSample = NO; - } - - if (output == _captureVideoOutput.avOutput) { - if (_videoIsDisconnected) { - _videoIsDisconnected = NO; - - if (_videoTimeOffset.value == 0) { - _videoTimeOffset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime); - } else { - CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime); - _videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset); - } - - return; - } - - _lastVideoSampleTime = currentSampleTime; - - CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset); - // do not append sample buffer when readyForMoreMediaData is NO to avoid crash - // https://github.com/flutter/flutter/issues/132073 - if (_videoWriterInput.readyForMoreMediaData) { - [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime]; - } - } else { - CMTime dur = CMSampleBufferGetDuration(sampleBuffer); - - if (dur.value > 0) { - currentSampleTime = CMTimeAdd(currentSampleTime, dur); - } - - if (_audioIsDisconnected) { - _audioIsDisconnected = NO; - - if (_audioTimeOffset.value == 0) { - _audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime); - } else { - CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime); - _audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset); - } - - return; - } - - _lastAudioSampleTime = currentSampleTime; - - if (_audioTimeOffset.value != 0) { - CMSampleBufferRef adjustedSampleBuffer = - [self copySampleBufferWithAdjustedTime:sampleBuffer by:_audioTimeOffset]; - [self newAudioSample:adjustedSampleBuffer]; - CFRelease(adjustedSampleBuffer); - } else { - [self newAudioSample:sampleBuffer]; - } - } - } -} - -- (CMSampleBufferRef)copySampleBufferWithAdjustedTime:(CMSampleBufferRef)sample 
by:(CMTime)offset { - CMItemCount count; - CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); - CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count); - CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count); - for (CMItemCount i = 0; i < count; i++) { - pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset); - pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset); - } - CMSampleBufferRef sout; - CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); - free(pInfo); - return sout; -} - -- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer { - if (_videoWriter.status != AVAssetWriterStatusWriting) { - if (_videoWriter.status == AVAssetWriterStatusFailed) { - [self reportErrorMessage:[NSString stringWithFormat:@"%@", _videoWriter.error]]; - } - return; - } - if (_videoWriterInput.readyForMoreMediaData) { - if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) { - [self reportErrorMessage:@"Unable to write to video input"]; - } - } -} - -- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer { - if (_videoWriter.status != AVAssetWriterStatusWriting) { - if (_videoWriter.status == AVAssetWriterStatusFailed) { - [self reportErrorMessage:[NSString stringWithFormat:@"%@", _videoWriter.error]]; - } - return; - } - if (_audioWriterInput.readyForMoreMediaData) { - if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) { - [self reportErrorMessage:@"Unable to write to audio input"]; - } - } -} - -- (void)close { - [self stop]; - for (AVCaptureInput *input in [_videoCaptureSession inputs]) { - [_videoCaptureSession removeInput:[[FLTDefaultCaptureInput alloc] initWithInput:input]]; - } - for (AVCaptureOutput *output in [_videoCaptureSession outputs]) { - [_videoCaptureSession removeOutput:output]; - } - for (AVCaptureInput *input in [_audioCaptureSession inputs]) { - [_audioCaptureSession removeInput:[[FLTDefaultCaptureInput alloc] initWithInput:input]]; - } - for (AVCaptureOutput *output in [_audioCaptureSession outputs]) { - [_audioCaptureSession removeOutput:output]; - } -} - -- (void)dealloc { - if (_latestPixelBuffer) { - CFRelease(_latestPixelBuffer); - } - [_motionManager stopAccelerometerUpdates]; -} - -- (CVPixelBufferRef)copyPixelBuffer { - __block CVPixelBufferRef pixelBuffer = nil; - // Use `dispatch_sync` because `copyPixelBuffer` API requires synchronous return. - dispatch_sync(self.pixelBufferSynchronizationQueue, ^{ - // No need weak self because it's dispatch_sync. 
- pixelBuffer = self.latestPixelBuffer; - self.latestPixelBuffer = nil; - }); - return pixelBuffer; -} - -- (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion - messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger { - if (!_isRecording) { - if (messenger != nil) { - [self startImageStreamWithMessenger:messenger]; - } - - NSError *error; - _videoRecordingPath = [self getTemporaryFilePathWithExtension:@"mp4" - subfolder:@"videos" - prefix:@"REC_" - error:error]; - if (error) { - completion(FlutterErrorFromNSError(error)); - return; - } - if (![self setupWriterForPath:_videoRecordingPath]) { - completion([FlutterError errorWithCode:@"IOError" - message:@"Setup Writer Failed" - details:nil]); - return; - } - // startWriting should not be called in didOutputSampleBuffer where it can cause state - // in which _isRecording is YES but _videoWriter.status is AVAssetWriterStatusUnknown - // in stopVideoRecording if it is called after startVideoRecording but before - // didOutputSampleBuffer had chance to call startWriting and lag at start of video - // https://github.com/flutter/flutter/issues/132016 - // https://github.com/flutter/flutter/issues/151319 - [_videoWriter startWriting]; - _isFirstVideoSample = YES; - _isRecording = YES; - _isRecordingPaused = NO; - _videoTimeOffset = CMTimeMake(0, 1); - _audioTimeOffset = CMTimeMake(0, 1); - _videoIsDisconnected = NO; - _audioIsDisconnected = NO; - completion(nil); - } else { - completion([FlutterError errorWithCode:@"Error" - message:@"Video is already recording" - details:nil]); - } -} - -- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable, - FlutterError *_Nullable))completion { - if (_isRecording) { - _isRecording = NO; - - // when _isRecording is YES startWriting was already called so _videoWriter.status - // is always either AVAssetWriterStatusWriting or AVAssetWriterStatusFailed and - // finishWritingWithCompletionHandler does not throw exception so there is no need - // to check _videoWriter.status - [_videoWriter finishWritingWithCompletionHandler:^{ - if (self->_videoWriter.status == AVAssetWriterStatusCompleted) { - [self updateOrientation]; - completion(self->_videoRecordingPath, nil); - self->_videoRecordingPath = nil; - } else { - completion(nil, [FlutterError errorWithCode:@"IOError" - message:@"AVAssetWriter could not finish writing!"
- details:nil]); - } - }]; - } else { - NSError *error = - [NSError errorWithDomain:NSCocoaErrorDomain - code:NSURLErrorResourceUnavailable - userInfo:@{NSLocalizedDescriptionKey : @"Video is not recording!"}]; - completion(nil, FlutterErrorFromNSError(error)); - } -} - -- (void)pauseVideoRecording { - _isRecordingPaused = YES; - _videoIsDisconnected = YES; - _audioIsDisconnected = YES; -} - -- (void)resumeVideoRecording { - _isRecordingPaused = NO; -} - -- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)pigeonOrientation { - UIDeviceOrientation orientation = - FCPGetUIDeviceOrientationForPigeonDeviceOrientation(pigeonOrientation); - if (_lockedCaptureOrientation != orientation) { - _lockedCaptureOrientation = orientation; - [self updateOrientation]; - } -} - -- (void)unlockCaptureOrientation { - _lockedCaptureOrientation = UIDeviceOrientationUnknown; - [self updateOrientation]; -} - -- (void)setFlashMode:(FCPPlatformFlashMode)mode - withCompletion:(void (^)(FlutterError *_Nullable))completion { - if (mode == FCPPlatformFlashModeTorch) { - if (!_captureDevice.hasTorch) { - completion([FlutterError errorWithCode:@"setFlashModeFailed" - message:@"Device does not support torch mode" - details:nil]); - return; - } - if (!_captureDevice.isTorchAvailable) { - completion([FlutterError errorWithCode:@"setFlashModeFailed" - message:@"Torch mode is currently not available" - details:nil]); - return; - } - if (_captureDevice.torchMode != AVCaptureTorchModeOn) { - [_captureDevice lockForConfiguration:nil]; - [_captureDevice setTorchMode:AVCaptureTorchModeOn]; - [_captureDevice unlockForConfiguration]; - } - } else { - if (!_captureDevice.hasFlash) { - completion([FlutterError errorWithCode:@"setFlashModeFailed" - message:@"Device does not have flash capabilities" - details:nil]); - return; - } - AVCaptureFlashMode avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(mode); - if (![_capturePhotoOutput.supportedFlashModes - containsObject:[NSNumber numberWithInt:((int)avFlashMode)]]) { - completion([FlutterError errorWithCode:@"setFlashModeFailed" - message:@"Device does not support this specific flash mode" - details:nil]); - return; - } - if (_captureDevice.torchMode != AVCaptureTorchModeOff) { - [_captureDevice lockForConfiguration:nil]; - [_captureDevice setTorchMode:AVCaptureTorchModeOff]; - [_captureDevice unlockForConfiguration]; - } - } - _flashMode = mode; - completion(nil); -} - -- (void)setExposureMode:(FCPPlatformExposureMode)mode { - _exposureMode = mode; - [self applyExposureMode]; -} - -- (void)applyExposureMode { - [_captureDevice lockForConfiguration:nil]; - switch (self.exposureMode) { - case FCPPlatformExposureModeLocked: - // AVCaptureExposureModeAutoExpose automatically adjusts the exposure one time, and then - // locks exposure for the device - [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; - break; - case FCPPlatformExposureModeAuto: - if ([_captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) { - [_captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure]; - } else { - [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; - } - break; - } - [_captureDevice unlockForConfiguration]; -} - -- (void)setFocusMode:(FCPPlatformFocusMode)mode { - _focusMode = mode; - [self applyFocusMode]; -} - -- (void)applyFocusMode { - [self applyFocusMode:_focusMode onDevice:_captureDevice]; -} - -- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode - onDevice:(NSObject<FLTCaptureDevice> *)captureDevice { - [captureDevice
lockForConfiguration:nil]; - switch (focusMode) { - case FCPPlatformFocusModeLocked: - // AVCaptureFocusModeAutoFocus automatically adjusts the focus one time, and then locks focus - if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { - [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; - } - break; - case FCPPlatformFocusModeAuto: - if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) { - [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; - } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { - [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; - } - break; - } - [captureDevice unlockForConfiguration]; -} - -- (void)pausePreview { - _isPreviewPaused = true; -} - -- (void)resumePreview { - _isPreviewPaused = false; -} - -- (void)setDescriptionWhileRecording:(NSString *)cameraName - withCompletion:(void (^)(FlutterError *_Nullable))completion { - if (!_isRecording) { - completion([FlutterError errorWithCode:@"setDescriptionWhileRecordingFailed" - message:@"Device was not recording" - details:nil]); - return; - } - - _captureDevice = self.captureDeviceFactory(cameraName); - - NSObject<FLTCaptureConnection> *oldConnection = - [_captureVideoOutput connectionWithMediaType:AVMediaTypeVideo]; - - // Stop video capture from the old output. - [_captureVideoOutput setSampleBufferDelegate:nil queue:nil]; - - // Remove the old video capture connections. - [_videoCaptureSession beginConfiguration]; - [_videoCaptureSession removeInput:_captureVideoInput]; - [_videoCaptureSession removeOutput:_captureVideoOutput.avOutput]; - - NSError *error = nil; - AVCaptureConnection *newConnection = [self createConnection:&error]; - if (error) { - completion(FlutterErrorFromNSError(error)); - return; - } - - // Keep the same orientation the old connections had. - if (oldConnection && newConnection.isVideoOrientationSupported) { - newConnection.videoOrientation = oldConnection.videoOrientation; - } - - // Add the new connections to the session.
- if (![_videoCaptureSession canAddInput:_captureVideoInput]) - completion([FlutterError errorWithCode:@"VideoError" - message:@"Unable switch video input" - details:nil]); - [_videoCaptureSession addInputWithNoConnections:_captureVideoInput]; - if (![_videoCaptureSession canAddOutput:_captureVideoOutput.avOutput]) - completion([FlutterError errorWithCode:@"VideoError" - message:@"Unable switch video output" - details:nil]); - [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput.avOutput]; - if (![_videoCaptureSession canAddConnection:newConnection]) - completion([FlutterError errorWithCode:@"VideoError" - message:@"Unable switch video connection" - details:nil]); - [_videoCaptureSession addConnection:newConnection]; - [_videoCaptureSession commitConfiguration]; - - completion(nil); -} - -- (CGPoint)CGPointForPoint:(nonnull FCPPlatformPoint *)point - withOrientation:(UIDeviceOrientation)orientation { - double x = point.x; - double y = point.y; - switch (orientation) { - case UIDeviceOrientationPortrait: // 90 ccw - y = 1 - point.x; - x = point.y; - break; - case UIDeviceOrientationPortraitUpsideDown: // 90 cw - x = 1 - point.y; - y = point.x; - break; - case UIDeviceOrientationLandscapeRight: // 180 - x = 1 - point.x; - y = 1 - point.y; - break; - case UIDeviceOrientationLandscapeLeft: - default: - // No rotation required - break; - } - return CGPointMake(x, y); -} - -- (void)setExposurePoint:(FCPPlatformPoint *)point - withCompletion:(void (^)(FlutterError *_Nullable))completion { - if (!_captureDevice.exposurePointOfInterestSupported) { - completion([FlutterError errorWithCode:@"setExposurePointFailed" - message:@"Device does not have exposure point capabilities" - details:nil]); - return; - } - UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; - [_captureDevice lockForConfiguration:nil]; - // A nil point resets to the center. - [_captureDevice - setExposurePointOfInterest:[self CGPointForPoint:(point - ?: [FCPPlatformPoint makeWithX:0.5 - y:0.5]) - withOrientation:orientation]]; - [_captureDevice unlockForConfiguration]; - // Retrigger auto exposure - [self applyExposureMode]; - completion(nil); -} - -- (void)setFocusPoint:(FCPPlatformPoint *)point - withCompletion:(void (^)(FlutterError *_Nullable))completion { - if (!_captureDevice.focusPointOfInterestSupported) { - completion([FlutterError errorWithCode:@"setFocusPointFailed" - message:@"Device does not have focus point capabilities" - details:nil]); - return; - } - UIDeviceOrientation orientation = [_deviceOrientationProvider orientation]; - [_captureDevice lockForConfiguration:nil]; - // A nil point resets to the center. 
- [_captureDevice - setFocusPointOfInterest:[self - CGPointForPoint:(point ?: [FCPPlatformPoint makeWithX:0.5 y:0.5]) - withOrientation:orientation]]; - [_captureDevice unlockForConfiguration]; - // Retrigger auto focus - [self applyFocusMode]; - completion(nil); -} - -- (void)setExposureOffset:(double)offset { - [_captureDevice lockForConfiguration:nil]; - [_captureDevice setExposureTargetBias:offset completionHandler:nil]; - [_captureDevice unlockForConfiguration]; -} - -- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger { - [self startImageStreamWithMessenger:messenger - imageStreamHandler:[[FLTImageStreamHandler alloc] - initWithCaptureSessionQueue:_captureSessionQueue]]; -} - -- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger - imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler { - if (!_isStreamingImages) { - id eventChannel = [FlutterEventChannel - eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream" - binaryMessenger:messenger]; - FLTThreadSafeEventChannel *threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel]; - - _imageStreamHandler = imageStreamHandler; - __weak typeof(self) weakSelf = self; - [threadSafeEventChannel setStreamHandler:_imageStreamHandler - completion:^{ - typeof(self) strongSelf = weakSelf; - if (!strongSelf) return; - - dispatch_async(strongSelf.captureSessionQueue, ^{ - // cannot use the outter strongSelf - typeof(self) strongSelf = weakSelf; - if (!strongSelf) return; - - strongSelf.isStreamingImages = YES; - strongSelf.streamingPendingFramesCount = 0; - }); - }]; - } else { - [self reportErrorMessage:@"Images from camera are already streaming!"]; - } -} - -- (void)stopImageStream { - if (_isStreamingImages) { - _isStreamingImages = NO; - _imageStreamHandler = nil; - } else { - [self reportErrorMessage:@"Images from camera are not streaming!"]; - } -} - -- (void)receivedImageStreamData { - self.streamingPendingFramesCount--; -} - -- (void)setZoomLevel:(CGFloat)zoom withCompletion:(void (^)(FlutterError *_Nullable))completion { - if (_captureDevice.maxAvailableVideoZoomFactor < zoom || - _captureDevice.minAvailableVideoZoomFactor > zoom) { - NSString *errorMessage = [NSString - stringWithFormat:@"Zoom level out of bounds (zoom level should be between %f and %f).", - _captureDevice.minAvailableVideoZoomFactor, - _captureDevice.maxAvailableVideoZoomFactor]; - - completion([FlutterError errorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil]); - return; - } - - NSError *error = nil; - if (![_captureDevice lockForConfiguration:&error]) { - completion(FlutterErrorFromNSError(error)); - return; - } - _captureDevice.videoZoomFactor = zoom; - [_captureDevice unlockForConfiguration]; - - completion(nil); -} - -- (CGFloat)minimumAvailableZoomFactor { - return _captureDevice.minAvailableVideoZoomFactor; -} - -- (CGFloat)maximumAvailableZoomFactor { - return _captureDevice.maxAvailableVideoZoomFactor; -} - -- (CGFloat)minimumExposureOffset { - return _captureDevice.minExposureTargetBias; -} - -- (CGFloat)maximumExposureOffset { - return _captureDevice.maxExposureTargetBias; -} - -- (BOOL)setupWriterForPath:(NSString *)path { - NSError *error = nil; - NSURL *outputURL; - if (path != nil) { - outputURL = [NSURL fileURLWithPath:path]; - } else { - return NO; - } - - [self setUpCaptureSessionForAudioIfNeeded]; - - _videoWriter = _assetWriterFactory(outputURL, AVFileTypeMPEG4, &error); - - NSParameterAssert(_videoWriter); - if (error) { - [self
reportErrorMessage:error.description]; - return NO; - } - - NSMutableDictionary *videoSettings = [[_mediaSettingsAVWrapper - recommendedVideoSettingsForAssetWriterWithFileType:AVFileTypeMPEG4 - forOutput:_captureVideoOutput] mutableCopy]; - - if (_mediaSettings.videoBitrate || _mediaSettings.framesPerSecond) { - NSMutableDictionary *compressionProperties = [[NSMutableDictionary alloc] init]; - - if (_mediaSettings.videoBitrate) { - compressionProperties[AVVideoAverageBitRateKey] = _mediaSettings.videoBitrate; - } - - if (_mediaSettings.framesPerSecond) { - compressionProperties[AVVideoExpectedSourceFrameRateKey] = _mediaSettings.framesPerSecond; - } - - videoSettings[AVVideoCompressionPropertiesKey] = compressionProperties; - } - - _videoWriterInput = - [_mediaSettingsAVWrapper assetWriterVideoInputWithOutputSettings:videoSettings]; - - _videoAdaptor = _inputPixelBufferAdaptorFactory( - _videoWriterInput, @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)}); - - NSParameterAssert(_videoWriterInput); - - _videoWriterInput.expectsMediaDataInRealTime = YES; - - // Add the audio input - if (_mediaSettings.enableAudio) { - AudioChannelLayout acl; - bzero(&acl, sizeof(acl)); - acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; - NSMutableDictionary *audioOutputSettings = [@{ - AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatMPEG4AAC], - AVSampleRateKey : [NSNumber numberWithFloat:44100.0], - AVNumberOfChannelsKey : [NSNumber numberWithInt:1], - AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(acl)], - } mutableCopy]; - - if (_mediaSettings.audioBitrate) { - audioOutputSettings[AVEncoderBitRateKey] = _mediaSettings.audioBitrate; - } - - _audioWriterInput = - [_mediaSettingsAVWrapper assetWriterAudioInputWithOutputSettings:audioOutputSettings]; - - _audioWriterInput.expectsMediaDataInRealTime = YES; - - [_mediaSettingsAVWrapper addInput:_audioWriterInput toAssetWriter:_videoWriter]; - [_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue]; - } - - if (self.flashMode == FCPPlatformFlashModeTorch) { - [self.captureDevice lockForConfiguration:nil]; - [self.captureDevice setTorchMode:AVCaptureTorchModeOn]; - [self.captureDevice unlockForConfiguration]; - } - - [_mediaSettingsAVWrapper addInput:_videoWriterInput toAssetWriter:_videoWriter]; - - [_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue]; - - return YES; -} - -// This function, although slightly modified, is also in video_player_avfoundation. -// Both need to do the same thing and run on the same thread (for example main thread). -// Configure application wide audio session manually to prevent overwriting flag -// MixWithOthers by capture session. -// Only change category if it is considered an upgrade which means it can only enable -// ability to play in silent mode or ability to record audio but never disables it, -// that could affect other plugins which depend on this global state. Only change -// category or options if there is change to prevent unnecessary lags and silence. 
-static void upgradeAudioSessionCategory(AVAudioSessionCategory requestedCategory, - AVAudioSessionCategoryOptions options) { - NSSet *playCategories = [NSSet - setWithObjects:AVAudioSessionCategoryPlayback, AVAudioSessionCategoryPlayAndRecord, nil]; - NSSet *recordCategories = - [NSSet setWithObjects:AVAudioSessionCategoryRecord, AVAudioSessionCategoryPlayAndRecord, nil]; - NSSet *requiredCategories = - [NSSet setWithObjects:requestedCategory, AVAudioSession.sharedInstance.category, nil]; - BOOL requiresPlay = [requiredCategories intersectsSet:playCategories]; - BOOL requiresRecord = [requiredCategories intersectsSet:recordCategories]; - if (requiresPlay && requiresRecord) { - requestedCategory = AVAudioSessionCategoryPlayAndRecord; - } else if (requiresPlay) { - requestedCategory = AVAudioSessionCategoryPlayback; - } else if (requiresRecord) { - requestedCategory = AVAudioSessionCategoryRecord; - } - options = AVAudioSession.sharedInstance.categoryOptions | options; - if ([requestedCategory isEqualToString:AVAudioSession.sharedInstance.category] && - options == AVAudioSession.sharedInstance.categoryOptions) { - return; - } - [AVAudioSession.sharedInstance setCategory:requestedCategory withOptions:options error:nil]; -} - -- (void)setUpCaptureSessionForAudioIfNeeded { - // Don't setup audio twice or we will lose the audio. - if (!_mediaSettings.enableAudio || _isAudioSetup) { - return; - } - - NSError *error = nil; - // Create a device input with the device and add it to the session. - // Setup the audio input. - NSObject<FLTCaptureDevice> *audioDevice = [[FLTDefaultCaptureDevice alloc] - initWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]]; - NSObject<FLTCaptureInput> *audioInput = - [_captureDeviceInputFactory deviceInputWithDevice:audioDevice error:&error]; - if (error) { - [self reportErrorMessage:error.description]; - } - // Setup the audio output. - _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; - - dispatch_block_t block = ^{ - // Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other - // plugins like video_player. - upgradeAudioSessionCategory(AVAudioSessionCategoryPlayAndRecord, - AVAudioSessionCategoryOptionDefaultToSpeaker | - AVAudioSessionCategoryOptionAllowBluetoothA2DP | - AVAudioSessionCategoryOptionAllowAirPlay); - }; - if (!NSThread.isMainThread) { - dispatch_sync(dispatch_get_main_queue(), block); - } else { - block(); - } - - if ([_audioCaptureSession canAddInput:audioInput]) { - [_audioCaptureSession addInput:audioInput]; - - if ([_audioCaptureSession canAddOutput:_audioOutput]) { - [_audioCaptureSession addOutput:_audioOutput]; - _isAudioSetup = YES; - } else { - [self reportErrorMessage:@"Unable to add Audio input/output to session capture"]; - _isAudioSetup = NO; - } - } -} - -- (void)reportErrorMessage:(NSString *)errorMessage { - __weak typeof(self) weakSelf = self; - FLTEnsureToRunOnMainQueue(^{ - [weakSelf.dartAPI reportError:errorMessage - completion:^(FlutterError *error){ - // Ignore any errors, as this is just an event broadcast.
- }]; - }); -} - -@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCamConfiguration.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCamConfiguration.m index 88b16d0cee5b..0c934d5d4836 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCamConfiguration.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCamConfiguration.m @@ -9,6 +9,7 @@ @implementation FLTCamConfiguration - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings mediaSettingsWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsWrapper captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory + audioCaptureDeviceFactory:(AudioCaptureDeviceFactory)audioCaptureDeviceFactory captureSessionFactory:(CaptureSessionFactory)captureSessionFactory captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceInputFactory: @@ -22,6 +23,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _videoCaptureSession = captureSessionFactory(); _audioCaptureSession = captureSessionFactory(); _captureDeviceFactory = captureDeviceFactory; + _audioCaptureDeviceFactory = audioCaptureDeviceFactory; _orientation = [[UIDevice currentDevice] orientation]; _deviceOrientationProvider = [[FLTDefaultDeviceOrientationProvider alloc] init]; _videoDimensionsForFormat = ^CMVideoDimensions(NSObject<FLTCaptureDeviceFormat> *format) { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/QueueUtils.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/QueueUtils.m index cb0895b5c2e8..6e31e9756be7 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/QueueUtils.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/QueueUtils.m @@ -4,8 +4,6 @@ #import "./include/camera_avfoundation/QueueUtils.h" -const char *FLTCaptureSessionQueueSpecific = "capture_session_queue"; - void FLTEnsureToRunOnMainQueue(dispatch_block_t block) { if (!NSThread.isMainThread) { dispatch_async(dispatch_get_main_queue(), block); @@ -13,7 +11,3 @@ void FLTEnsureToRunOnMainQueue(dispatch_block_t block) { block(); } } - -void FLTDispatchQueueSetSpecific(dispatch_queue_t queue, const void *key) { - dispatch_queue_set_specific(queue, key, (void *)key, NULL); -} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h deleted file mode 100644 index 197bf63bcc2a..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file.
- -@import AVFoundation; -@import Foundation; -@import Flutter; - -#import "CameraProperties.h" -#import "FLTCamConfiguration.h" -#import "FLTCamMediaSettingsAVWrapper.h" -#import "FLTCaptureDevice.h" -#import "FLTDeviceOrientationProviding.h" -#import "messages.g.h" - -NS_ASSUME_NONNULL_BEGIN - -/// A class that manages camera's state and performs camera operations. -@interface FLTCam : NSObject <FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate> - -@property(readonly, nonatomic) NSObject<FLTCaptureDevice> *captureDevice; -@property(readonly, nonatomic) CGSize previewSize; -@property(assign, nonatomic) BOOL isPreviewPaused; -@property(nonatomic, copy) void (^onFrameAvailable)(void); -/// The API instance used to communicate with the Dart side of the plugin. Once initially set, this -/// should only ever be accessed on the main thread. -@property(nonatomic) FCPCameraEventApi *dartAPI; -// Format used for video and image streaming. -@property(assign, nonatomic) FourCharCode videoFormat; -@property(assign, nonatomic) FCPPlatformImageFileFormat fileFormat; -@property(assign, nonatomic) CGFloat minimumAvailableZoomFactor; -@property(assign, nonatomic) CGFloat maximumAvailableZoomFactor; -@property(assign, nonatomic) CGFloat minimumExposureOffset; -@property(assign, nonatomic) CGFloat maximumExposureOffset; - -/// Initializes an `FLTCam` instance with the given configuration. -/// @param error report to the caller if any error happened creating the camera. -- (instancetype)initWithConfiguration:(FLTCamConfiguration *)configuration error:(NSError **)error; - -/// Informs the Dart side of the plugin of the current camera state and capabilities. -- (void)reportInitializationState; -- (void)start; -- (void)stop; -- (void)setDeviceOrientation:(UIDeviceOrientation)orientation; -- (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable, - FlutterError *_Nullable))completion; -- (void)close; -- (void)setImageFileFormat:(FCPPlatformImageFileFormat)fileFormat; -/// Starts recording a video with an optional streaming messenger. -/// If the messenger is non-nil then it will be called for each -/// captured frame, allowing streaming concurrently with recording. -/// -/// @param messenger Nullable messenger for capturing each frame. -- (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion - messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger; -- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable, - FlutterError *_Nullable))completion; -- (void)pauseVideoRecording; -- (void)resumeVideoRecording; -- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation; -- (void)unlockCaptureOrientation; -- (void)setFlashMode:(FCPPlatformFlashMode)mode - withCompletion:(void (^)(FlutterError *_Nullable))completion; -- (void)setExposureMode:(FCPPlatformExposureMode)mode; -/// Sets FocusMode on the current AVCaptureDevice. -/// -/// If the @c focusMode is set to FocusModeAuto the AVCaptureDevice is configured to use -/// AVCaptureFocusModeContinuousModeAutoFocus when supported, otherwise it is set to -/// AVCaptureFocusModeAutoFocus. If neither AVCaptureFocusModeContinuousModeAutoFocus nor -/// AVCaptureFocusModeAutoFocus are supported focus mode will not be set. -/// If @c focusMode is set to FocusModeLocked the AVCaptureDevice is configured to use -/// AVCaptureFocusModeAutoFocus. If AVCaptureFocusModeAutoFocus is not supported focus mode will not -/// be set. -/// -/// @param mode The focus mode that should be applied.
-- (void)setFocusMode:(FCPPlatformFocusMode)mode; - -/// Acknowledges the receipt of one image stream frame. -/// -/// This should be called each time a frame is received. Failing to call it may -/// cause later frames to be dropped instead of streamed. -- (void)receivedImageStreamData; - -- (void)pausePreview; -- (void)resumePreview; -- (void)setDescriptionWhileRecording:(NSString *)cameraName - withCompletion:(void (^)(FlutterError *_Nullable))completion; - -/// Sets the exposure point, in a (0,1) coordinate system. -/// -/// If @c point is nil, the exposure point will reset to the center. -- (void)setExposurePoint:(nullable FCPPlatformPoint *)point - withCompletion:(void (^)(FlutterError *_Nullable))completion; - -/// Sets the focus point, in a (0,1) coordinate system. -/// -/// If @c point is nil, the focus point will reset to the center. -- (void)setFocusPoint:(nullable FCPPlatformPoint *)point - withCompletion:(void (^)(FlutterError *_Nullable))completion - NS_SWIFT_NAME(setFocusPoint(_:completion:)); -- (void)setExposureOffset:(double)offset; -- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger; -- (void)stopImageStream; -- (void)setZoomLevel:(CGFloat)zoom withCompletion:(void (^)(FlutterError *_Nullable))completion; -- (void)setUpCaptureSessionForAudioIfNeeded; - -@end - -NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h index 8426f129e89a..3e0e47f40658 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h @@ -19,6 +19,8 @@ NS_ASSUME_NONNULL_BEGIN /// Used in tests to inject a device into FLTCam. typedef NSObject<FLTCaptureDevice> *_Nonnull (^CaptureDeviceFactory)(NSString *); +typedef NSObject<FLTCaptureDevice> *_Nonnull (^AudioCaptureDeviceFactory)(); + typedef NSObject<FLTCaptureSession> *_Nonnull (^CaptureSessionFactory)(void); typedef NSObject<FLTAssetWriter> *_Nonnull (^AssetWriterFactory)(NSURL *, AVFileType, @@ -38,6 +40,7 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(NSObject<FLTCaptureDeviceFormat> *); +@property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory; @property(nonatomic, strong) NSObject<FLTCaptureSession> *videoCaptureSession; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h deleted file mode 100644 index 3e3a44922dd6..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#import "FLTCam.h" -#import "FLTCaptureConnection.h" -#import "FLTCaptureDevice.h" -#import "FLTCapturePhotoOutput.h" -#import "FLTCaptureVideoDataOutput.h" -#import "FLTDeviceOrientationProviding.h" -#import "FLTSavePhotoDelegate.h" - -@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler> - -/// The queue on which `eventSink` property should be accessed.
-@property(nonatomic, strong) dispatch_queue_t captureSessionQueue; - -/// The event sink to stream camera events to Dart. -/// -/// The property should only be accessed on `captureSessionQueue`. -/// The block itself should be invoked on the main queue. -@property FlutterEventSink eventSink; - -@end - -// APIs exposed for unit testing. -@interface FLTCam () - -/// The output for video capturing. -@property(strong, nonatomic) NSObject<FLTCaptureVideoDataOutput> *captureVideoOutput; - -/// The output for photo capturing. Exposed setter for unit tests. -@property(strong, nonatomic) NSObject<FLTCapturePhotoOutput> *capturePhotoOutput; - -/// True when images from the camera are being streamed. -@property(assign, nonatomic) BOOL isStreamingImages; - -/// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the -/// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the -/// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo -/// capture operations may overlap, so FLTCam has to keep track of multiple delegates in progress, -/// instead of just a single delegate reference. -@property(readonly, nonatomic) - NSMutableDictionary<NSNumber *, FLTSavePhotoDelegate *> *inProgressSavePhotoDelegates; - -/// Delegate callback when receiving a new video or audio sample. -/// Exposed for unit tests. -- (void)captureOutput:(AVCaptureOutput *)output - didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(NSObject<FLTCaptureConnection> *)connection; - -/// Start streaming images. -- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger - imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler; - -@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/QueueUtils.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/QueueUtils.h index bc8fc49840dd..c77d5e2cd065 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/QueueUtils.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/QueueUtils.h @@ -6,9 +6,6 @@ NS_ASSUME_NONNULL_BEGIN -/// Queue-specific context data to be associated with the capture session queue. -extern const char* FLTCaptureSessionQueueSpecific; - /// Ensures the given block to be run on the main queue. /// If caller site is already on the main queue, the block will be run /// synchronously. Otherwise, the block will be dispatched asynchronously to the @@ -16,13 +13,4 @@ extern const char* FLTCaptureSessionQueueSpecific; /// @param block the block to be run on the main queue. extern void FLTEnsureToRunOnMainQueue(dispatch_block_t block); -/// Calls `dispatch_queue_set_specific` with a key that is used to identify the -/// queue. This method is needed for compatibility of Swift implementation with -/// Objective-C code. In Swift, the API for setting key-value pairs on a queue -/// is different, so Swift code need to call this method to set the key-value -/// pair on the queue in a way that's compatible with the existing Objective-C -/// code.
-extern void FLTDispatchQueueSetSpecific(dispatch_queue_t queue, - const void* key); - NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h index 96ffe59f0eb6..db32a11a0aea 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h @@ -2,7 +2,6 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#import "FLTCam.h" #import "FLTCamConfiguration.h" #import "FLTCameraDeviceDiscovering.h" #import "FLTCameraPermissionManager.h" diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index e325431c3b82..3647c389c97c 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.19 +version: 0.9.19+1 environment: sdk: ^3.4.0 From 864ec1f0941edd469f477b6863378c2c1765356f Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Wed, 7 May 2025 19:04:08 +0200 Subject: [PATCH 02/11] Remove unnecessary any in FLTDefaultCam --- .../Sources/camera_avfoundation/FLTCam.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift index a5492be99754..eada9f844f96 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift @@ -899,7 +899,7 @@ class FLTDefaultCam: NSObject, FLTCam { } private func updateOrientation( - _ orientation: UIDeviceOrientation, forCaptureOutput captureOutput: any FLTCaptureOutput + _ orientation: UIDeviceOrientation, forCaptureOutput captureOutput: FLTCaptureOutput ) { if let connection = captureOutput.connection(withMediaType: .video), connection.isVideoOrientationSupported From 5bfa7361ae7b4af7fcceaa64ab9792cc3eaf2a14 Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Wed, 7 May 2025 19:13:27 +0200 Subject: [PATCH 03/11] Fix imports for SwiftPM --- .../example/ios/RunnerTests/CameraTestUtils.swift | 4 ++-- .../Sources/camera_avfoundation/FLTImageStreamHandler.swift | 3 +++ .../Sources/camera_avfoundation/QueueUtils.swift | 2 ++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift index 12d8d8333e05..3b2ee049c188 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift +++ 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift @@ -4,11 +4,11 @@ import XCTest +@testable import camera_avfoundation + // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) @testable import camera_avfoundation_objc -#else - @testable import camera_avfoundation #endif /// Utils for creating default class instances used in tests diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift index 42b26ba207d3..155cd7d85185 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift @@ -2,6 +2,9 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +import Flutter +import ObjectiveC + class FLTImageStreamHandler: NSObject, FlutterStreamHandler { /// The queue on which `eventSink` property should be accessed. diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift index 074153a15085..e5f7492bcae5 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +import Dispatch + /// Queue-specific context data to be associated with the capture session queue. let fltCaptureSessionQueueSpecificKey = DispatchSpecificKey<String>() let fltCaptureSessionQueueSpecificValue = "capture_session_queue" From 7eca6e5c3f5e55b3cb1fedc8efafd341e8ada66a Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Wed, 7 May 2025 20:07:54 +0200 Subject: [PATCH 04/11] Add void argument type to AudioCaptureDeviceFactory --- .../include/camera_avfoundation/FLTCamConfiguration.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h index 3e0e47f40658..36b8ac177848 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h @@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN /// Used in tests to inject a device into FLTCam.
typedef NSObject<FLTCaptureDevice> *_Nonnull (^CaptureDeviceFactory)(NSString *); -typedef NSObject<FLTCaptureDevice> *_Nonnull (^AudioCaptureDeviceFactory)(); +typedef NSObject<FLTCaptureDevice> *_Nonnull (^AudioCaptureDeviceFactory)(void); typedef NSObject<FLTCaptureSession> *_Nonnull (^CaptureSessionFactory)(void); From 2c2f1dbfa0309fe82dd14b4bb554e4a142d8f32c Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Thu, 8 May 2025 14:22:50 +0200 Subject: [PATCH 05/11] Rename FLTCam to Camera --- .../ios/Runner.xcodeproj/project.pbxproj | 8 +++---- .../RunnerTests/CameraOrientationTests.swift | 4 ++-- .../CameraPluginDelegatingMethodTests.swift | 4 ++-- .../CameraPluginInitializeCameraTests.swift | 4 ++-- .../ios/RunnerTests/CameraTestUtils.swift | 8 +++---- .../ios/RunnerTests/FLTCamExposureTests.swift | 2 +- .../ios/RunnerTests/FLTCamFocusTests.swift | 2 +- .../FLTCamSetDeviceOrientationTests.swift | 2 +- .../RunnerTests/FLTCamSetFlashModeTests.swift | 2 +- .../ios/RunnerTests/FLTCamZoomTests.swift | 2 +- .../{MockFLTCam.swift => MockCamera.swift} | 2 +- .../ios/RunnerTests/PhotoCaptureTests.swift | 2 +- .../ios/RunnerTests/SampleBufferTests.swift | 2 +- .../ios/RunnerTests/StreamingTests.swift | 2 +- .../{FLTCam.swift => Camera.swift} | 22 +++++++++---------- .../camera_avfoundation/CameraPlugin.swift | 4 ++-- 16 files changed, 36 insertions(+), 36 deletions(-) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/{MockFLTCam.swift => MockCamera.swift} (99%) rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/{FLTCam.swift => Camera.swift} (98%) diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 7d6ca0e5068b..161c8ebda27f 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -60,7 +60,7 @@ E1ABED6F2D943B2500AED9CC /* MockCaptureDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = E15BC7ED2D86D85500F66474 /* MockCaptureDevice.swift */; }; E1ABED722D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1ABED702D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift */; }; E1ABED732D943DC700AED9CC /* MockCaptureInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1ABED712D943DC700AED9CC /* MockCaptureInput.swift */; }; -E1FFEAAD2D6C8DD700B14107 /* MockFLTCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAAC2D6C8DD700B14107 /* MockFLTCam.swift */; }; +E1FFEAAD2D6C8DD700B14107 /* MockCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAAC2D6C8DD700B14107 /* MockCamera.swift */; }; E1FFEAAF2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAAE2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift */; }; E1FFEAB12D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAB02D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift */; }; /* End PBXBuildFile section */ @@ -154,7 +154,7 @@ E1A5F4E22D80259C0005BA64 /* FLTCamSetFlashModeTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FLTCamSetFlashModeTests.swift; sourceTree = "<group>"; }; E1ABED702D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = 
MockCaptureDeviceInputFactory.swift; sourceTree = "<group>"; }; E1ABED712D943DC700AED9CC /* MockCaptureInput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MockCaptureInput.swift; sourceTree = "<group>"; }; -E1FFEAAC2D6C8DD700B14107 /* MockFLTCam.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockFLTCam.swift; sourceTree = "<group>"; }; +E1FFEAAC2D6C8DD700B14107 /* MockCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockCamera.swift; sourceTree = "<group>"; }; E1FFEAAE2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPluginCreateCameraTests.swift; sourceTree = "<group>"; }; E1FFEAB02D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPluginInitializeCameraTests.swift; sourceTree = "<group>"; }; E67C6DBF6478BE708993169F /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = "<group>"; }; @@ -245,7 +245,7 @@ E142F1392D85940600824824 /* MockCapturePhotoOutput.swift */, E142F1372D85919700824824 /* MockDeviceOrientationProvider.swift */, E142F1352D8587F900824824 /* MockCameraDeviceDiscoverer.swift */, - E1FFEAAC2D6C8DD700B14107 /* MockFLTCam.swift */, + E1FFEAAC2D6C8DD700B14107 /* MockCamera.swift */, 970ADABF2D6764CC00EFDCD9 /* MockEventChannel.swift */, E12C4FF72D68E85500515E70 /* MockFLTCameraPermissionManager.swift */, 970ADABD2D6740A900EFDCD9 /* MockWritableData.swift */, @@ -572,7 +572,7 @@ E1ABED722D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift in Sources */, 977A25202D5A439300931E34 /* AvailableCamerasTests.swift in Sources */, E142681F2D8566230046CBBC /* CameraTestUtils.swift in Sources */, - E1FFEAAD2D6C8DD700B14107 /* MockFLTCam.swift in Sources */, + E1FFEAAD2D6C8DD700B14107 /* MockCamera.swift in Sources */, E16602952D8471C0003CFE12 /* FLTCamZoomTests.swift in Sources */, 97BD4A102D5CE13500F857D5 /* CameraSessionPresetsTests.swift in Sources */, 979B3E022D5BA48F009BDE1A /* CameraOrientationTests.swift in Sources */, diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.swift index 920f9f698209..6f3dd6d361b3 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.swift @@ -24,14 +24,14 @@ private final class MockUIDevice: UIDevice { final class CameraOrientationTests: XCTestCase { private func createCameraPlugin() -> ( cameraPlugin: CameraPlugin, - mockCamera: MockFLTCam, + mockCamera: MockCamera, mockEventAPI: MockGlobalEventApi, mockDevice: MockCaptureDevice, mockDeviceDiscoverer: MockCameraDeviceDiscoverer, captureSessionQueue: DispatchQueue ) { let mockDevice = MockCaptureDevice() - let mockCamera = MockFLTCam() + let mockCamera = MockCamera() let mockEventAPI = MockGlobalEventApi() let mockDeviceDiscoverer = MockCameraDeviceDiscoverer() let captureSessionQueue = DispatchQueue(label: "io.flutter.camera.captureSessionQueue") diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift index 27c8d44d4193..aed9c853650c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift @@ -13,8 +13,8 @@ import XCTest /// Tests of `CameraPlugin` methods delegating to `FLTCam` instance final class CameraPluginDelegatingMethodTests: XCTestCase { - private func createCameraPlugin() -> (CameraPlugin, MockFLTCam) { - let mockCamera = MockFLTCam() + private func createCameraPlugin() -> (CameraPlugin, MockCamera) { + let mockCamera = MockCamera() let cameraPlugin = CameraPlugin( registry: MockFlutterTextureRegistry(), diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift index 6a8a2906147a..855d748e905f 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift @@ -13,9 +13,9 @@ import XCTest final class CameraPluginInitializeCameraTests: XCTestCase { private func createCameraPlugin() -> ( - CameraPlugin, MockFLTCam, MockGlobalEventApi, DispatchQueue + CameraPlugin, MockCamera, MockGlobalEventApi, DispatchQueue ) { - let mockCamera = MockFLTCam() + let mockCamera = MockCamera() let mockGlobalEventApi = MockGlobalEventApi() let captureSessionQueue = DispatchQueue(label: "io.flutter.camera.captureSessionQueue") diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift index 3b2ee049c188..5669213fdb04 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift @@ -81,18 +81,18 @@ enum CameraTestUtils { return configuration } - static func createTestCamera(_ configuration: FLTCamConfiguration) -> FLTDefaultCam { - let camera = try? FLTDefaultCam(configuration: configuration) + static func createTestCamera(_ configuration: FLTCamConfiguration) -> DefaultCamera { + let camera = try? DefaultCamera(configuration: configuration) return camera! 
} - static func createTestCamera() -> FLTDefaultCam { + static func createTestCamera() -> DefaultCamera { return createTestCamera(createTestCameraConfiguration()) } static func createCameraWithCaptureSessionQueue(_ captureSessionQueue: DispatchQueue) - -> FLTDefaultCam + -> DefaultCamera { let configuration = createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift index 670ae621cb68..457be9ce735e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift @@ -12,7 +12,7 @@ import XCTest #endif final class FLTCamExposureTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureDevice, MockDeviceOrientationProvider) { + private func createCamera() -> (Camera, MockCaptureDevice, MockDeviceOrientationProvider) { let mockDevice = MockCaptureDevice() let mockDeviceOrientationProvider = MockDeviceOrientationProvider() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift index efd7869cbee0..94ccf2b80b00 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift @@ -13,7 +13,7 @@ import XCTest #endif final class FLTCamSetFocusModeTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureDevice, MockDeviceOrientationProvider) { + private func createCamera() -> (Camera, MockCaptureDevice, MockDeviceOrientationProvider) { let mockDevice = MockCaptureDevice() let mockDeviceOrientationProvider = MockDeviceOrientationProvider() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift index fd24d23798e8..e2f71d89ed26 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift @@ -13,7 +13,7 @@ import XCTest #endif final class FLTCamSetDeviceOrientationTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureConnection, MockCaptureConnection) { + private func createCamera() -> (Camera, MockCaptureConnection, MockCaptureConnection) { let camera = CameraTestUtils.createTestCamera() let mockCapturePhotoOutput = MockCapturePhotoOutput() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift index dd06ca592d23..47c68d9c60bd 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift @@ -13,7 +13,7 @@ import XCTest #endif final class FLTCamSetFlashModeTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureDevice, MockCapturePhotoOutput) { + private func createCamera() -> (Camera, MockCaptureDevice, MockCapturePhotoOutput) { let mockDevice = MockCaptureDevice() let 
mockCapturePhotoOutput = MockCapturePhotoOutput() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift index 3e854037e0c8..0d371994e76e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift @@ -13,7 +13,7 @@ import XCTest #endif final class FLTCamZoomTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureDevice) { + private func createCamera() -> (Camera, MockCaptureDevice) { let mockDevice = MockCaptureDevice() let configuration = CameraTestUtils.createTestCameraConfiguration() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift similarity index 99% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift index 0904a188979c..da25fb7fd7f7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift @@ -9,7 +9,7 @@ @testable import camera_avfoundation_objc #endif -final class MockFLTCam: NSObject, FLTCam { +final class MockCamera: NSObject, Camera { var setDartApiStub: ((FCPCameraEventApi?) -> Void)? var setOnFrameAvailableStub: (((() -> Void)?) -> Void)? var getMinimumExposureOffsetStub: (() -> CGFloat)? diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift index cad8337bd86a..e9515c7a08c0 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift @@ -14,7 +14,7 @@ import XCTest /// Includes test cases related to photo capture operations for FLTCam class. final class PhotoCaptureTests: XCTestCase { - private func createCam(with captureSessionQueue: DispatchQueue) -> FLTDefaultCam { + private func createCam(with captureSessionQueue: DispatchQueue) -> DefaultCamera { let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue return CameraTestUtils.createTestCamera(configuration) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift index 50c5d7ef96c7..c5fd4b4e5f03 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift @@ -69,7 +69,7 @@ private class FakeMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { /// Includes test cases related to sample buffer handling for FLTCam class. 
final class CameraSampleBufferTests: XCTestCase { private func createCamera() -> ( - FLTDefaultCam, + DefaultCamera, MockAssetWriter, MockAssetWriterInputPixelBufferAdaptor, MockAssetWriterInput diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift index 0538e2a40cc6..14c12f15ccf6 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift @@ -35,7 +35,7 @@ private class MockImageStreamHandler: FLTImageStreamHandler { } final class StreamingTests: XCTestCase { - private func createCamera() -> (FLTDefaultCam, CMSampleBuffer) { + private func createCamera() -> (DefaultCamera, CMSampleBuffer) { let captureSessionQueue = DispatchQueue(label: "testing") let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift similarity index 98% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift index eada9f844f96..e80259301904 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -12,7 +12,7 @@ import Flutter #endif /// A class that manages camera's state and performs camera operations. -protocol FLTCam: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, +protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { /// The API instance used to communicate with the Dart side of the plugin. @@ -100,7 +100,7 @@ protocol FLTCam: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, func close() } -class FLTDefaultCam: NSObject, FLTCam { +class DefaultCamera: NSObject, Camera { var dartAPI: FCPCameraEventApi? var onFrameAvailable: (() -> Void)? @@ -332,7 +332,7 @@ class FLTDefaultCam: NSObject, FLTCam { deviceOrientation = configuration.orientation let connection: AVCaptureConnection - (captureVideoInput, captureVideoOutput, connection) = try FLTDefaultCam.createConnection( + (captureVideoInput, captureVideoOutput, connection) = try DefaultCamera.createConnection( captureDevice: captureDevice, videoFormat: videoFormat, captureDeviceInputFactory: configuration.captureDeviceInputFactory) @@ -367,7 +367,7 @@ class FLTDefaultCam: NSObject, FLTCam { throw error } - FLTDefaultCam.selectBestFormatForRequestedFrameRate( + DefaultCamera.selectBestFormatForRequestedFrameRate( captureDevice: captureDevice, mediaSettings: mediaSettings, targetFrameRate: targetFrameRate.doubleValue, @@ -508,7 +508,7 @@ class FLTDefaultCam: NSObject, FLTCam { let block = { // Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other // plugins like video_player. 
- FLTDefaultCam.upgradeAudioSessionCategory( + DefaultCamera.upgradeAudioSessionCategory( requestedCategory: .playAndRecord, options: [.defaultToSpeaker, .allowBluetoothA2DP, .allowAirPlay] ) @@ -637,7 +637,7 @@ class FLTDefaultCam: NSObject, FLTCam { prefix: "REC_") self.videoRecordingPath = videoRecordingPath } catch let error as NSError { - completion(FLTDefaultCam.flutterErrorFromNSError(error)) + completion(DefaultCamera.flutterErrorFromNSError(error)) return } @@ -788,7 +788,7 @@ class FLTDefaultCam: NSObject, FLTCam { code: URLError.resourceUnavailable.rawValue, userInfo: [NSLocalizedDescriptionKey: "Video is not recording!"] ) - completion(nil, FLTDefaultCam.flutterErrorFromNSError(error)) + completion(nil, DefaultCamera.flutterErrorFromNSError(error)) } } @@ -822,7 +822,7 @@ class FLTDefaultCam: NSObject, FLTCam { subfolder: "pictures", prefix: "CAP_") } catch let error as NSError { - completion(nil, FLTDefaultCam.flutterErrorFromNSError(error)) + completion(nil, DefaultCamera.flutterErrorFromNSError(error)) return } @@ -840,7 +840,7 @@ class FLTDefaultCam: NSObject, FLTCam { } if let error = error { - completion(nil, FLTDefaultCam.flutterErrorFromNSError(error as NSError)) + completion(nil, DefaultCamera.flutterErrorFromNSError(error as NSError)) } else { assert(path != nil, "Path must not be nil if no error.") completion(path, nil) @@ -1091,7 +1091,7 @@ class FLTDefaultCam: NSObject, FLTCam { do { try captureDevice.lockForConfiguration() } catch let error as NSError { - completion(FLTDefaultCam.flutterErrorFromNSError(error)) + completion(DefaultCamera.flutterErrorFromNSError(error)) return } @@ -1190,7 +1190,7 @@ class FLTDefaultCam: NSObject, FLTCam { let newConnection: AVCaptureConnection do { - (captureVideoInput, captureVideoOutput, newConnection) = try FLTDefaultCam.createConnection( + (captureVideoInput, captureVideoOutput, newConnection) = try DefaultCamera.createConnection( captureDevice: captureDevice, videoFormat: videoFormat, captureDeviceInputFactory: captureDeviceInputFactory) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index f736b2c512b1..a999b1712b77 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -24,7 +24,7 @@ public final class CameraPlugin: NSObject, FlutterPlugin { private let captureSessionQueue: DispatchQueue /// An internal camera object that manages camera's state and performs camera operations. - var camera: FLTCam? + var camera: Camera? 
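A note on the change above: the plugin's `camera` property is now typed as the `Camera` protocol rather than the concrete class, which is the seam that lets RunnerTests install `MockCamera` in place of `DefaultCamera`. A minimal self-contained sketch of that pattern; `Startable`, `StubCamera`, and `PluginHolder` are invented names, not part of the plugin (the diff resumes below):

    // Sketch only: a protocol-typed property makes production and test
    // implementations interchangeable.
    protocol Startable: AnyObject {
      func start()
    }

    final class StubCamera: Startable {
      private(set) var startCallCount = 0
      func start() { startCallCount += 1 }
    }

    final class PluginHolder {
      var camera: Startable?  // protocol-typed, like `var camera: Camera?` above
    }

    let holder = PluginHolder()
    let stub = StubCamera()
    holder.camera = stub
    holder.camera?.start()
    assert(stub.startCallCount == 1)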
public static func register(with registrar: FlutterPluginRegistrar) { let instance = CameraPlugin( @@ -250,7 +250,7 @@ extension CameraPlugin: FCPCameraApi { ) do { - let newCamera = try FLTDefaultCam(configuration: camConfiguration) + let newCamera = try DefaultCamera(configuration: camConfiguration) camera?.close() camera = newCamera From f2e0674e100686a7f60e1756cbc7a81143574988 Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Thu, 8 May 2025 14:25:53 +0200 Subject: [PATCH 06/11] Remove flt prefix from capture session queue specific --- .../ios/RunnerTests/PhotoCaptureTests.swift | 20 +++++++++---------- .../Sources/camera_avfoundation/Camera.swift | 4 ++-- .../camera_avfoundation/CameraPlugin.swift | 4 ++-- .../camera_avfoundation/QueueUtils.swift | 4 ++-- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift index e9515c7a08c0..a82cad14ee64 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift @@ -25,8 +25,8 @@ final class PhotoCaptureTests: XCTestCase { description: "Must send error to result if save photo delegate completes with error.") let captureSessionQueue = DispatchQueue(label: "capture_session_queue") captureSessionQueue.setSpecific( - key: fltCaptureSessionQueueSpecificKey, - value: fltCaptureSessionQueueSpecificValue) + key: captureSessionQueueSpecificKey, + value: captureSessionQueueSpecificValue) let cam = createCam(with: captureSessionQueue) let error = NSError(domain: "test", code: 0, userInfo: nil) @@ -59,8 +59,8 @@ final class PhotoCaptureTests: XCTestCase { description: "Must send file path to result if save photo delegate completes with file path.") let captureSessionQueue = DispatchQueue(label: "capture_session_queue") captureSessionQueue.setSpecific( - key: fltCaptureSessionQueueSpecificKey, - value: fltCaptureSessionQueueSpecificValue) + key: captureSessionQueueSpecificKey, + value: captureSessionQueueSpecificValue) let cam = createCam(with: captureSessionQueue) let filePath = "test" @@ -94,8 +94,8 @@ final class PhotoCaptureTests: XCTestCase { let captureSessionQueue = DispatchQueue(label: "capture_session_queue") captureSessionQueue.setSpecific( - key: fltCaptureSessionQueueSpecificKey, - value: fltCaptureSessionQueueSpecificValue) + key: captureSessionQueueSpecificKey, + value: captureSessionQueueSpecificValue) let cam = createCam(with: captureSessionQueue) cam.setImageFileFormat(FCPPlatformImageFileFormat.heif) @@ -130,8 +130,8 @@ final class PhotoCaptureTests: XCTestCase { let captureSessionQueue = DispatchQueue(label: "capture_session_queue") captureSessionQueue.setSpecific( - key: fltCaptureSessionQueueSpecificKey, - value: fltCaptureSessionQueueSpecificValue) + key: captureSessionQueueSpecificKey, + value: captureSessionQueueSpecificValue) let cam = createCam(with: captureSessionQueue) cam.setImageFileFormat(FCPPlatformImageFileFormat.heif) @@ -176,8 +176,8 @@ final class PhotoCaptureTests: XCTestCase { let captureSessionQueue = DispatchQueue(label: "capture_session_queue") captureSessionQueue.setSpecific( - key: fltCaptureSessionQueueSpecificKey, - value: fltCaptureSessionQueueSpecificValue) + key: captureSessionQueueSpecificKey, + value: captureSessionQueueSpecificValue) let configuration = CameraTestUtils.createTestCameraConfiguration() 
configuration.captureSessionQueue = captureSessionQueue configuration.captureDeviceFactory = { _ in captureDeviceMock } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift index e80259301904..a81872fb63ff 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -849,8 +849,8 @@ class DefaultCamera: NSObject, Camera { ) assert( - DispatchQueue.getSpecific(key: fltCaptureSessionQueueSpecificKey) - == fltCaptureSessionQueueSpecificValue, + DispatchQueue.getSpecific(key: captureSessionQueueSpecificKey) + == captureSessionQueueSpecificValue, "save photo delegate references must be updated on the capture session queue") inProgressSavePhotoDelegates[settings.uniqueID] = savePhotoDelegate capturePhotoOutput.capturePhoto(with: settings, delegate: savePhotoDelegate) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index a999b1712b77..15a935de4002 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -70,8 +70,8 @@ public final class CameraPlugin: NSObject, FlutterPlugin { super.init() captureSessionQueue.setSpecific( - key: fltCaptureSessionQueueSpecificKey, - value: fltCaptureSessionQueueSpecificValue) + key: captureSessionQueueSpecificKey, + value: captureSessionQueueSpecificValue) UIDevice.current.beginGeneratingDeviceOrientationNotifications() NotificationCenter.default.addObserver( diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift index e5f7492bcae5..6968d4d5ce5e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/QueueUtils.swift @@ -5,5 +5,5 @@ import Dispatch /// Queue-specific context data to be associated with the capture session queue. 
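The key/value pair renamed in QueueUtils.swift (its declarations follow just below) uses GCD's queue-specific data so the plugin can assert that code is running on the capture session queue. A self-contained sketch of the mechanism, with illustrative names standing in for the plugin's own:

    import Dispatch

    let specificKey = DispatchSpecificKey<String>()
    let specificValue = "capture_session_queue"

    let queue = DispatchQueue(label: "capture_session_queue")
    queue.setSpecific(key: specificKey, value: specificValue)

    queue.sync {
      // getSpecific(key:) returns the value only while running on `queue`,
      // which is what the save-photo-delegate assertion above relies on.
      assert(DispatchQueue.getSpecific(key: specificKey) == specificValue)
    }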
-let fltCaptureSessionQueueSpecificKey = DispatchSpecificKey<String>() -let fltCaptureSessionQueueSpecificValue = "capture_session_queue" +let captureSessionQueueSpecificKey = DispatchSpecificKey<String>() +let captureSessionQueueSpecificValue = "capture_session_queue"
From 23e49ea41732abd0f177eccbb67e74a4328a8fe7 Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Thu, 8 May 2025 14:57:50 +0200 Subject: [PATCH 07/11] Remove captureOutput delegate method --- .../ios/RunnerTests/CameraTestUtils.swift | 8 +++ .../ios/RunnerTests/SampleBufferTests.swift | 63 ++++++++++++++----- .../ios/RunnerTests/StreamingTests.swift | 21 ++++--- .../Sources/camera_avfoundation/Camera.swift | 7 +-- 4 files changed, 73 insertions(+), 26 deletions(-)
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift index 5669213fdb04..dec257339d1b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift @@ -176,6 +176,14 @@ enum CameraTestUtils { return sampleBuffer! } + + static func createTestAudioOutput() -> AVCaptureOutput { + return AVCaptureAudioDataOutput() + } + + static func createTestConnection(_ output: AVCaptureOutput) -> AVCaptureConnection { + return AVCaptureConnection(inputPorts: [], output: output) + } } extension XCTestCase {
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift index c5fd4b4e5f03..a9e1c5c16ec8 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift @@ -111,9 +111,13 @@ final class CameraSampleBufferTests: XCTestCase { let (camera, _, _, _) = createCamera() let capturedSampleBuffer = CameraTestUtils.createTestSampleBuffer() let capturedPixelBuffer = CMSampleBufferGetImageBuffer(capturedSampleBuffer)! + let testConnection = CameraTestUtils.createTestConnection(camera.captureVideoOutput.avOutput) + // Mimic sample buffer callback when captured a new video sample.
camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: capturedSampleBuffer) + camera.captureVideoOutput.avOutput, + didOutput: capturedSampleBuffer, + from: testConnection) let deliveredPixelBuffer = camera.copyPixelBuffer()?.takeRetainedValue() XCTAssertEqual( deliveredPixelBuffer, capturedPixelBuffer, @@ -123,6 +127,7 @@ final class CameraSampleBufferTests: XCTestCase { func testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPauseResumeRecording() { let (camera, _, _, _) = createCamera() let sampleBuffer = CameraTestUtils.createTestSampleBuffer() + let testConnection = CameraTestUtils.createTestConnection(camera.captureVideoOutput.avOutput) let initialRetainCount = CFGetRetainCount(sampleBuffer) @@ -132,7 +137,9 @@ final class CameraSampleBufferTests: XCTestCase { camera.resumeVideoRecording() camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: sampleBuffer) + camera.captureVideoOutput.avOutput, + didOutput: sampleBuffer, + from: testConnection) let finalRetainCount = CFGetRetainCount(sampleBuffer) XCTAssertEqual( @@ -153,7 +160,12 @@ final class CameraSampleBufferTests: XCTestCase { } let videoSample = CameraTestUtils.createTestSampleBuffer() + let testVideoConnection = CameraTestUtils.createTestConnection( + camera.captureVideoOutput.avOutput) + let audioSample = CameraTestUtils.createTestAudioSampleBuffer() + let testAudioOutput = CameraTestUtils.createTestAudioOutput() + let testAudioConnection = CameraTestUtils.createTestConnection(testAudioOutput) var writtenSamples: [String] = [] adaptorMock.appendStub = { buffer, time in @@ -167,11 +179,13 @@ final class CameraSampleBufferTests: XCTestCase { } camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil) - camera.captureOutput(nil, didOutput: audioSample) - camera.captureOutput(nil, didOutput: audioSample) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: videoSample) - camera.captureOutput(nil, didOutput: audioSample) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) let expectedSamples = ["video", "audio"] XCTAssertEqual(writtenSamples, expectedSamples, "First appended sample must be video.") @@ -181,7 +195,12 @@ final class CameraSampleBufferTests: XCTestCase { let (camera, writerMock, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() + let testVideoConnection = CameraTestUtils.createTestConnection( + camera.captureVideoOutput.avOutput) + let audioSample = CameraTestUtils.createTestAudioSampleBuffer() + let testAudioOutput = CameraTestUtils.createTestAudioOutput() + let testAudioConnection = CameraTestUtils.createTestConnection(testAudioOutput) var status = AVAssetWriter.Status.unknown writerMock.startWritingStub = { @@ -212,11 +231,15 @@ final class CameraSampleBufferTests: XCTestCase { camera.pauseVideoRecording() camera.resumeVideoRecording() camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: videoSample) - camera.captureOutput(nil, didOutput: audioSample) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) 
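For context on why every call site in these tests now passes a real output and connection instead of `nil`: `captureOutput(_:didOutput:from:)` is the required method of `AVCaptureVideoDataOutputSampleBufferDelegate` (and of its audio counterpart), so removing the two-argument convenience overload leaves only this signature. A minimal conforming type, as a sketch with an invented name (the hunk resumes below):

    import AVFoundation

    final class FrameCounter: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
      private(set) var frameCount = 0

      func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
      ) {
        frameCount += 1  // called once per delivered frame
      }
    }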
camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: videoSample) - camera.captureOutput(nil, didOutput: audioSample) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) XCTAssert(videoAppended && audioAppended, "Video or audio was not appended.") } @@ -225,6 +248,8 @@ final class CameraSampleBufferTests: XCTestCase { let (camera, _, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() + let testVideoConnection = CameraTestUtils.createTestConnection( + camera.captureVideoOutput.avOutput) var sampleAppended = false adaptorMock.appendStub = { buffer, time in @@ -237,13 +262,17 @@ final class CameraSampleBufferTests: XCTestCase { inputMock.readyForMoreMediaData = true sampleAppended = false camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: videoSample) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) XCTAssertTrue(sampleAppended, "Sample was not appended.") inputMock.readyForMoreMediaData = false sampleAppended = false camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: videoSample) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) XCTAssertFalse(sampleAppended, "Sample cannot be appended when readyForMoreMediaData is NO.") } @@ -279,6 +308,8 @@ final class CameraSampleBufferTests: XCTestCase { let (camera, writerMock, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() + let testVideoConnection = CameraTestUtils.createTestConnection( + camera.captureVideoOutput.avOutput) var startWritingCalled = false writerMock.startWritingStub = { @@ -299,13 +330,17 @@ final class CameraSampleBufferTests: XCTestCase { let startWritingCalledBefore = startWritingCalled camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: videoSample) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) XCTAssert( (startWritingCalledBefore && videoAppended) || (startWritingCalled && !videoAppended), "The startWriting was called between sample creation and appending.") camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutput: videoSample) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) XCTAssert(videoAppended, "Video was not appended.") } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift index 14c12f15ccf6..e42f0ad06873 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift @@ -35,19 +35,26 @@ private class MockImageStreamHandler: FLTImageStreamHandler { } final class StreamingTests: XCTestCase { - private func createCamera() -> (DefaultCamera, CMSampleBuffer) { + private func createCamera() -> ( + DefaultCamera, + AVCaptureOutput, + CMSampleBuffer, + AVCaptureConnection + ) { let captureSessionQueue = DispatchQueue(label: "testing") let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue let camera = CameraTestUtils.createTestCamera(configuration) + let testAudioOutput = CameraTestUtils.createTestAudioOutput() let sampleBuffer = 
CameraTestUtils.createTestSampleBuffer() + let testAudioConnection = CameraTestUtils.createTestConnection(testAudioOutput) - return (camera, sampleBuffer) + return (camera, testAudioOutput, sampleBuffer, testAudioConnection) } func testExceedMaxStreamingPendingFramesCount() { - let (camera, sampleBuffer) = createCamera() + let (camera, testAudioOutput, sampleBuffer, testAudioConnection) = createCamera() let streamingExpectation = expectation( description: "Must not call handler over maxStreamingPendingFramesCount") let handlerMock = MockImageStreamHandler() @@ -62,14 +69,14 @@ final class StreamingTests: XCTestCase { streamingExpectation.expectedFulfillmentCount = 4 for _ in 0..<10 { - camera.captureOutput(nil, didOutput: sampleBuffer) + camera.captureOutput(testAudioOutput, didOutput: sampleBuffer, from: testAudioConnection) } waitForExpectations(timeout: 30, handler: nil) } func testReceivedImageStreamData() { - let (camera, sampleBuffer) = createCamera() + let (camera, testAudioOutput, sampleBuffer, testAudioConnection) = createCamera() let streamingExpectation = expectation( description: "Must be able to call the handler again when receivedImageStreamData is called") let handlerMock = MockImageStreamHandler() @@ -84,11 +91,11 @@ final class StreamingTests: XCTestCase { streamingExpectation.expectedFulfillmentCount = 5 for _ in 0..<10 { - camera.captureOutput(nil, didOutput: sampleBuffer) + camera.captureOutput(testAudioOutput, didOutput: sampleBuffer, from: testAudioConnection) } camera.receivedImageStreamData() - camera.captureOutput(nil, didOutput: sampleBuffer) + camera.captureOutput(testAudioOutput, didOutput: sampleBuffer, from: testAudioConnection) waitForExpectations(timeout: 30, handler: nil) }
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift index a81872fb63ff..67bfe7be92fc 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -1287,13 +1287,10 @@ class DefaultCamera: NSObject, Camera { } func captureOutput( - _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection ) { - captureOutput(output, didOutput: sampleBuffer) - } - - func captureOutput(_ output: AVCaptureOutput?, didOutput sampleBuffer: CMSampleBuffer) { if output == captureVideoOutput.avOutput { if let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
From e744fd6ffb2c587906542748a7528c52d199bc2a Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Thu, 8 May 2025 15:04:54 +0200 Subject: [PATCH 08/11] Extract DefaultCamera to separate file --- .../Sources/camera_avfoundation/Camera.swift | 1443 ---------------- .../camera_avfoundation/DefaultCamera.swift | 1453 +++++++++++++++++ 2 files changed, 1453 insertions(+), 1443 deletions(-) create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift index 67bfe7be92fc..7fdde60680de 100644 ---
a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -99,1446 +99,3 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, func close() } - -class DefaultCamera: NSObject, Camera { - var dartAPI: FCPCameraEventApi? - var onFrameAvailable: (() -> Void)? - - var videoFormat: FourCharCode = kCVPixelFormatType_32BGRA { - didSet { - captureVideoOutput.videoSettings = [ - kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: videoFormat) - ] - } - } - - private(set) var isPreviewPaused = false - - var minimumExposureOffset: CGFloat { CGFloat(captureDevice.minExposureTargetBias) } - var maximumExposureOffset: CGFloat { CGFloat(captureDevice.maxExposureTargetBias) } - var minimumAvailableZoomFactor: CGFloat { captureDevice.minAvailableVideoZoomFactor } - var maximumAvailableZoomFactor: CGFloat { captureDevice.maxAvailableVideoZoomFactor } - - /// The queue on which `latestPixelBuffer` property is accessed. - /// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`. - private let pixelBufferSynchronizationQueue = DispatchQueue( - label: "io.flutter.camera.pixelBufferSynchronizationQueue") - - /// The queue on which captured photos (not videos) are written to disk. - /// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation. - private let photoIOQueue = DispatchQueue(label: "io.flutter.camera.photoIOQueue") - - /// All FLTCam's state access and capture session related operations should be run on this queue. - private let captureSessionQueue: DispatchQueue - - private let mediaSettings: FCPPlatformMediaSettings - private let mediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper - - private let videoCaptureSession: FLTCaptureSession - private let audioCaptureSession: FLTCaptureSession - - /// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests. - private let captureDeviceFactory: CaptureDeviceFactory - private let audioCaptureDeviceFactory: AudioCaptureDeviceFactory - private let captureDeviceInputFactory: FLTCaptureDeviceInputFactory - private let assetWriterFactory: AssetWriterFactory - private let inputPixelBufferAdaptorFactory: InputPixelBufferAdaptorFactory - - /// A wrapper for CMVideoFormatDescriptionGetDimensions. - /// Allows for alternate implementations in tests. - private let videoDimensionsForFormat: VideoDimensionsForFormat - - private let deviceOrientationProvider: FLTDeviceOrientationProviding - private let motionManager = CMMotionManager() - - private(set) var captureDevice: FLTCaptureDevice - // Setter exposed for tests. - var captureVideoOutput: FLTCaptureVideoDataOutput - // Setter exposed for tests. - var capturePhotoOutput: FLTCapturePhotoOutput - private var captureVideoInput: FLTCaptureInput - - private var videoWriter: FLTAssetWriter? - private var videoWriterInput: FLTAssetWriterInput? - private var audioWriterInput: FLTAssetWriterInput? - private var assetWriterPixelBufferAdaptor: FLTAssetWriterInputPixelBufferAdaptor? - private var videoAdaptor: FLTAssetWriterInputPixelBufferAdaptor? - - /// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the - /// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the - /// FLTSavePhotoDelegate that handles the result of each photo capture operation. 
Note that photo - /// capture operations may overlap, so FLTCam has to keep track of multiple delegates in progress, - /// instead of just a single delegate reference. - private(set) var inProgressSavePhotoDelegates = [Int64: FLTSavePhotoDelegate]() - - private var imageStreamHandler: FLTImageStreamHandler? - - private var textureId: Int64? - private var previewSize: CGSize? - private var deviceOrientation = UIDeviceOrientation.unknown - - /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. - /// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API. - private var latestPixelBuffer: CVPixelBuffer? - - private var videoRecordingPath: String? - private var isRecording = false - private var isRecordingPaused = false - private var isFirstVideoSample = false - private var videoIsDisconnected = false - private var audioIsDisconnected = false - private var isAudioSetup = false - private var lastVideoSampleTime = CMTime.zero - private var lastAudioSampleTime = CMTime.zero - private var videoTimeOffset = CMTime.zero - private var audioTimeOffset = CMTime.zero - - /// True when images from the camera are being streamed. - private(set) var isStreamingImages = false - - /// Number of frames currently pending processing. - private var streamingPendingFramesCount = 0 - - /// Maximum number of frames pending processing. - /// To limit memory consumption, limit the number of frames pending processing. - /// After some testing, 4 was determined to be the best maximum value. - /// https://github.com/flutter/plugins/pull/4520#discussion_r766335637 - private var maxStreamingPendingFramesCount = 4 - - private var fileFormat = FCPPlatformImageFileFormat.jpeg - private var lockedCaptureOrientation = UIDeviceOrientation.unknown - private var exposureMode = FCPPlatformExposureMode.auto - private var focusMode = FCPPlatformFocusMode.auto - private var flashMode: FCPPlatformFlashMode - - private static func flutterErrorFromNSError(_ error: NSError) -> FlutterError { - return FlutterError( - code: "Error \(error.code)", - message: error.localizedDescription, - details: error.domain) - } - - // Returns frame rate supported by format closest to targetFrameRate. - private static func bestFrameRate(for format: FLTCaptureDeviceFormat, targetFrameRate: Double) - -> Double - { - var bestFrameRate = 0.0 - var minDistance = Double.greatestFiniteMagnitude - for range in format.videoSupportedFrameRateRanges { - let frameRate = min( - max(targetFrameRate, Double(range.minFrameRate)), Double(range.maxFrameRate)) - let distance = abs(frameRate - targetFrameRate) - if distance < minDistance { - bestFrameRate = frameRate - minDistance = distance - } - } - return bestFrameRate - } - - // Finds format with same resolution as current activeFormat in captureDevice for which - // bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond. - // Preferred are formats with the same subtype as current activeFormat. Sets this format - // as activeFormat and also updates mediaSettings.framesPerSecond to value which - // bestFrameRateForFormat returned for that format.
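The comments above describe the clamping rule used by `bestFrameRate`; the sketch below restates that rule with plain doubles so it can be checked in isolation. Types and names here are illustrative, not the plugin's (the removed implementation continues below):

    // Mirrors the clamp-then-pick-closest rule: each supported range clamps
    // the target into [min, max], and the candidate closest to the target wins.
    struct RateRange {
      let minRate: Double
      let maxRate: Double
    }

    func closestSupportedRate(ranges: [RateRange], target: Double) -> Double {
      var best = 0.0
      var minDistance = Double.greatestFiniteMagnitude
      for range in ranges {
        let candidate = min(max(target, range.minRate), range.maxRate)
        let distance = abs(candidate - target)
        if distance < minDistance {
          best = candidate
          minDistance = distance
        }
      }
      return best
    }

    // Requesting 60 fps when the device supports 1-24 and 1-30 yields 30.
    assert(closestSupportedRate(
      ranges: [RateRange(minRate: 1, maxRate: 24), RateRange(minRate: 1, maxRate: 30)],
      target: 60) == 30)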
- private static func selectBestFormatForRequestedFrameRate( - captureDevice: FLTCaptureDevice, - mediaSettings: FCPPlatformMediaSettings, - targetFrameRate: Double, - videoDimensionsForFormat: (FLTCaptureDeviceFormat) -> CMVideoDimensions - ) { - let targetResolution = videoDimensionsForFormat(captureDevice.activeFormat) - let preferredSubType = CMFormatDescriptionGetMediaSubType( - captureDevice.activeFormat.formatDescription) - var bestFormat = captureDevice.activeFormat - var _bestFrameRate = bestFrameRate(for: bestFormat, targetFrameRate: targetFrameRate) - var minDistance = abs(_bestFrameRate - targetFrameRate) - var isBestSubTypePreferred = true - - for format in captureDevice.formats { - let resolution = videoDimensionsForFormat(format) - if resolution.width != targetResolution.width || resolution.height != targetResolution.height - { - continue - } - let frameRate = bestFrameRate(for: format, targetFrameRate: targetFrameRate) - let distance = abs(frameRate - targetFrameRate) - let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription) - let isSubTypePreferred = subType == preferredSubType - if distance < minDistance - || (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred) - { - bestFormat = format - _bestFrameRate = frameRate - minDistance = distance - isBestSubTypePreferred = isSubTypePreferred - } - } - captureDevice.activeFormat = bestFormat - mediaSettings.framesPerSecond = NSNumber(value: _bestFrameRate) - } - - private static func createConnection( - captureDevice: FLTCaptureDevice, - videoFormat: FourCharCode, - captureDeviceInputFactory: FLTCaptureDeviceInputFactory - ) throws -> (FLTCaptureInput, FLTCaptureVideoDataOutput, AVCaptureConnection) { - // Setup video capture input. - let captureVideoInput = try captureDeviceInputFactory.deviceInput(with: captureDevice) - - // Setup video capture output. - let captureVideoOutput = FLTDefaultCaptureVideoDataOutput( - captureVideoOutput: AVCaptureVideoDataOutput()) - captureVideoOutput.videoSettings = [ - kCVPixelBufferPixelFormatTypeKey as String: videoFormat as Any - ] - captureVideoOutput.alwaysDiscardsLateVideoFrames = true - - // Setup video capture connection. - let connection = AVCaptureConnection( - inputPorts: captureVideoInput.ports, - output: captureVideoOutput.avOutput) - - if captureDevice.position == .front { - connection.isVideoMirrored = true - } - - return (captureVideoInput, captureVideoOutput, connection) - } - - init(configuration: FLTCamConfiguration) throws { - captureSessionQueue = configuration.captureSessionQueue - mediaSettings = configuration.mediaSettings - mediaSettingsAVWrapper = configuration.mediaSettingsWrapper - videoCaptureSession = configuration.videoCaptureSession - audioCaptureSession = configuration.audioCaptureSession - captureDeviceFactory = configuration.captureDeviceFactory - audioCaptureDeviceFactory = configuration.audioCaptureDeviceFactory - captureDeviceInputFactory = configuration.captureDeviceInputFactory - assetWriterFactory = configuration.assetWriterFactory - inputPixelBufferAdaptorFactory = configuration.inputPixelBufferAdaptorFactory - videoDimensionsForFormat = configuration.videoDimensionsForFormat - deviceOrientationProvider = configuration.deviceOrientationProvider - - captureDevice = captureDeviceFactory(configuration.initialCameraName) - flashMode = captureDevice.hasFlash ? 
.auto : .off - - capturePhotoOutput = FLTDefaultCapturePhotoOutput(photoOutput: AVCapturePhotoOutput()) - capturePhotoOutput.highResolutionCaptureEnabled = true - - videoCaptureSession.automaticallyConfiguresApplicationAudioSession = false - audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false - - deviceOrientation = configuration.orientation - - let connection: AVCaptureConnection - (captureVideoInput, captureVideoOutput, connection) = try DefaultCamera.createConnection( - captureDevice: captureDevice, - videoFormat: videoFormat, - captureDeviceInputFactory: configuration.captureDeviceInputFactory) - - super.init() - - captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) - - videoCaptureSession.addInputWithNoConnections(captureVideoInput) - videoCaptureSession.addOutputWithNoConnections(captureVideoOutput.avOutput) - videoCaptureSession.addConnection(connection) - - videoCaptureSession.addOutput(capturePhotoOutput.avOutput) - - motionManager.startAccelerometerUpdates() - - if let targetFrameRate = mediaSettings.framesPerSecond { - // The frame rate can be changed only on a locked for configuration device. - try mediaSettingsAVWrapper.lockDevice(captureDevice) - mediaSettingsAVWrapper.beginConfiguration(for: videoCaptureSession) - - // Possible values for presets are hard-coded in FLT interface having - // corresponding AVCaptureSessionPreset counterparts. - // If _resolutionPreset is not supported by camera there is - // fallback to lower resolution presets. - // If none can be selected there is error condition. - do { - try setCaptureSessionPreset(mediaSettings.resolutionPreset) - } catch { - videoCaptureSession.commitConfiguration() - captureDevice.unlockForConfiguration() - throw error - } - - DefaultCamera.selectBestFormatForRequestedFrameRate( - captureDevice: captureDevice, - mediaSettings: mediaSettings, - targetFrameRate: targetFrameRate.doubleValue, - videoDimensionsForFormat: videoDimensionsForFormat - ) - - if let framesPerSecond = mediaSettings.framesPerSecond { - // Set frame rate with 1/10 precision allowing non-integral values. - let fpsNominator = floor(framesPerSecond.doubleValue * 10.0) - let duration = CMTimeMake(value: 10, timescale: Int32(fpsNominator)) - - mediaSettingsAVWrapper.setMinFrameDuration(duration, on: captureDevice) - mediaSettingsAVWrapper.setMaxFrameDuration(duration, on: captureDevice) - } - - mediaSettingsAVWrapper.commitConfiguration(for: videoCaptureSession) - mediaSettingsAVWrapper.unlockDevice(captureDevice) - } else { - // If the frame rate is not important fall to a less restrictive - // behavior (no configuration locking). - try setCaptureSessionPreset(mediaSettings.resolutionPreset) - } - - updateOrientation() - } - - private func setCaptureSessionPreset( - _ resolutionPreset: FCPPlatformResolutionPreset - ) throws { - switch resolutionPreset { - case .max: - if let bestFormat = highestResolutionFormat(forCaptureDevice: captureDevice) { - videoCaptureSession.sessionPreset = .inputPriority - if (try? captureDevice.lockForConfiguration()) != nil { - // Set the best device format found and finish the device configuration. 
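The removed `switch` around this point walks resolution presets with Swift's explicit `fallthrough`: each case tries its own preset and, if the session cannot use it, falls into the next lower case (the switch continues below). A reduced sketch of the same control flow, with invented preset names:

    // Only the control flow matches the removed switch; presets are invented.
    enum Preset { case high, medium, low }

    func choosePreset(_ requested: Preset, available: Set<Preset>) -> Preset? {
      switch requested {
      case .high:
        if available.contains(.high) { return .high }
        fallthrough  // .high unavailable, try .medium
      case .medium:
        if available.contains(.medium) { return .medium }
        fallthrough  // .medium unavailable, try .low
      case .low:
        if available.contains(.low) { return .low }
        return nil
      }
    }

    // .high is requested but unavailable, so the cascade lands on .medium.
    assert(choosePreset(.high, available: [.medium, .low]) == .medium)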
- captureDevice.activeFormat = bestFormat - captureDevice.unlockForConfiguration() - break - } - } - fallthrough - case .ultraHigh: - if videoCaptureSession.canSetSessionPreset(.hd4K3840x2160) { - videoCaptureSession.sessionPreset = .hd4K3840x2160 - break - } - if videoCaptureSession.canSetSessionPreset(.high) { - videoCaptureSession.sessionPreset = .high - break - } - fallthrough - case .veryHigh: - if videoCaptureSession.canSetSessionPreset(.hd1920x1080) { - videoCaptureSession.sessionPreset = .hd1920x1080 - break - } - fallthrough - case .high: - if videoCaptureSession.canSetSessionPreset(.hd1280x720) { - videoCaptureSession.sessionPreset = .hd1280x720 - break - } - fallthrough - case .medium: - if videoCaptureSession.canSetSessionPreset(.vga640x480) { - videoCaptureSession.sessionPreset = .vga640x480 - break - } - fallthrough - case .low: - if videoCaptureSession.canSetSessionPreset(.cif352x288) { - videoCaptureSession.sessionPreset = .cif352x288 - break - } - fallthrough - default: - if videoCaptureSession.canSetSessionPreset(.low) { - videoCaptureSession.sessionPreset = .low - } else { - throw NSError( - domain: NSCocoaErrorDomain, - code: URLError.unknown.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "No capture session available for current capture session." - ]) - } - } - - let size = videoDimensionsForFormat(captureDevice.activeFormat) - previewSize = CGSize(width: CGFloat(size.width), height: CGFloat(size.height)) - audioCaptureSession.sessionPreset = videoCaptureSession.sessionPreset - } - - /// Finds the highest available resolution in terms of pixel count for the given device. - /// Preferred are formats with the same subtype as current activeFormat. - private func highestResolutionFormat(forCaptureDevice captureDevice: FLTCaptureDevice) - -> FLTCaptureDeviceFormat? - { - let preferredSubType = CMFormatDescriptionGetMediaSubType( - captureDevice.activeFormat.formatDescription) - var bestFormat: FLTCaptureDeviceFormat? = nil - var maxPixelCount: UInt = 0 - var isBestSubTypePreferred = false - - for format in captureDevice.formats { - let resolution = videoDimensionsForFormat(format) - let height = UInt(resolution.height) - let width = UInt(resolution.width) - let pixelCount = height * width - let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription) - let isSubTypePreferred = subType == preferredSubType - - if pixelCount > maxPixelCount - || (pixelCount == maxPixelCount && isSubTypePreferred && !isBestSubTypePreferred) - { - bestFormat = format - maxPixelCount = pixelCount - isBestSubTypePreferred = isSubTypePreferred - } - } - - return bestFormat - } - - func setUpCaptureSessionForAudioIfNeeded() { - // Don't setup audio twice or we will lose the audio. - guard !mediaSettings.enableAudio || !isAudioSetup else { return } - - let audioDevice = audioCaptureDeviceFactory() - do { - // Create a device input with the device and add it to the session. - // Setup the audio input. - let audioInput = try captureDeviceInputFactory.deviceInput(with: audioDevice) - - // Setup the audio output. - let audioOutput = AVCaptureAudioDataOutput() - - let block = { - // Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other - // plugins like video_player. 
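The category-upgrade helper invoked just below merges the requested audio session category with the currently active one, so playback or record capability can be added but never removed. This sketch restates that merge rule as a pure function; `mergedCategory` is an invented helper, not the plugin's API:

    import AVFoundation

    func mergedCategory(
      requested: AVAudioSession.Category,
      current: AVAudioSession.Category
    ) -> AVAudioSession.Category {
      let playCategories: Set<AVAudioSession.Category> = [.playback, .playAndRecord]
      let recordCategories: Set<AVAudioSession.Category> = [.record, .playAndRecord]
      let required: Set<AVAudioSession.Category> = [requested, current]

      let needsPlay = !required.isDisjoint(with: playCategories)
      let needsRecord = !required.isDisjoint(with: recordCategories)

      if needsPlay && needsRecord { return .playAndRecord }
      if needsPlay { return .playback }
      if needsRecord { return .record }
      return requested
    }

    // Asking for .playAndRecord while .playback is active must keep playback.
    assert(mergedCategory(requested: .playAndRecord, current: .playback) == .playAndRecord)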
- DefaultCamera.upgradeAudioSessionCategory( - requestedCategory: .playAndRecord, - options: [.defaultToSpeaker, .allowBluetoothA2DP, .allowAirPlay] - ) - } - - if !Thread.isMainThread { - DispatchQueue.main.sync(execute: block) - } else { - block() - } - - if audioCaptureSession.canAddInput(audioInput) { - audioCaptureSession.addInput(audioInput) - - if audioCaptureSession.canAddOutput(audioOutput) { - audioCaptureSession.addOutput(audioOutput) - audioOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) - isAudioSetup = true - } else { - reportErrorMessage("Unable to add Audio input/output to session capture") - isAudioSetup = false - } - } - } catch let error as NSError { - reportErrorMessage(error.description) - } - } - - // This function, although slightly modified, is also in video_player_avfoundation. - // Both need to do the same thing and run on the same thread (for example main thread). - // Configure application wide audio session manually to prevent overwriting flag - // MixWithOthers by capture session. - // Only change category if it is considered an upgrade which means it can only enable - // ability to play in silent mode or ability to record audio but never disables it, - // that could affect other plugins which depend on this global state. Only change - // category or options if there is change to prevent unnecessary lags and silence. - private static func upgradeAudioSessionCategory( - requestedCategory: AVAudioSession.Category, - options: AVAudioSession.CategoryOptions - ) { - let playCategories: Set<AVAudioSession.Category> = [.playback, .playAndRecord] - let recordCategories: Set<AVAudioSession.Category> = [.record, .playAndRecord] - let requiredCategories: Set<AVAudioSession.Category> = [ - requestedCategory, AVAudioSession.sharedInstance().category, - ] - - let requiresPlay = !requiredCategories.isDisjoint(with: playCategories) - let requiresRecord = !requiredCategories.isDisjoint(with: recordCategories) - - var finalCategory = requestedCategory - if requiresPlay && requiresRecord { - finalCategory = .playAndRecord - } else if requiresPlay { - finalCategory = .playback - } else if requiresRecord { - finalCategory = .record - } - - let finalOptions = AVAudioSession.sharedInstance().categoryOptions.union(options) - - if finalCategory == AVAudioSession.sharedInstance().category - && finalOptions == AVAudioSession.sharedInstance().categoryOptions - { - return - } - - try? AVAudioSession.sharedInstance().setCategory(finalCategory, options: finalOptions) - } - - func reportInitializationState() { - // Get all the state on the current thread, not the main thread. - let state = FCPPlatformCameraState.make( - withPreviewSize: FCPPlatformSize.make( - withWidth: Double(previewSize!.width), - height: Double(previewSize!.height) - ), - exposureMode: exposureMode, - focusMode: focusMode, - exposurePointSupported: captureDevice.isExposurePointOfInterestSupported, - focusPointSupported: captureDevice.isFocusPointOfInterestSupported - ) - - FLTEnsureToRunOnMainQueue { [weak self] in - self?.dartAPI?.initialized(with: state) { _ in - // Ignore any errors, as this is just an event broadcast. - } - } - } - - func receivedImageStreamData() { - streamingPendingFramesCount -= 1 - } - - func start() { - videoCaptureSession.startRunning() - audioCaptureSession.startRunning() - } - - func stop() { - videoCaptureSession.stopRunning() - audioCaptureSession.stopRunning() - } - - func startVideoRecording( - completion: @escaping (FlutterError?) -> Void, - messengerForStreaming messenger: FlutterBinaryMessenger?
- ) { - guard !isRecording else { - completion( - FlutterError( - code: "Error", - message: "Video is already recording", - details: nil)) - return - } - - if let messenger = messenger { - startImageStream(with: messenger) - } - - let videoRecordingPath: String - do { - videoRecordingPath = try getTemporaryFilePath( - withExtension: "mp4", - subfolder: "videos", - prefix: "REC_") - self.videoRecordingPath = videoRecordingPath - } catch let error as NSError { - completion(DefaultCamera.flutterErrorFromNSError(error)) - return - } - - guard setupWriter(forPath: videoRecordingPath) else { - completion( - FlutterError( - code: "IOError", - message: "Setup Writer Failed", - details: nil)) - return - } - - // startWriting should not be called in didOutputSampleBuffer where it can cause state - // in which _isRecording is YES but _videoWriter.status is AVAssetWriterStatusUnknown - // in stopVideoRecording if it is called after startVideoRecording but before - // didOutputSampleBuffer had chance to call startWriting and lag at start of video - // https://github.com/flutter/flutter/issues/132016 - // https://github.com/flutter/flutter/issues/151319 - videoWriter?.startWriting() - isFirstVideoSample = true - isRecording = true - isRecordingPaused = false - videoTimeOffset = CMTimeMake(value: 0, timescale: 1) - audioTimeOffset = CMTimeMake(value: 0, timescale: 1) - videoIsDisconnected = false - audioIsDisconnected = false - completion(nil) - } - - private func setupWriter(forPath path: String) -> Bool { - setUpCaptureSessionForAudioIfNeeded() - - var error: NSError? - videoWriter = assetWriterFactory(URL(fileURLWithPath: path), AVFileType.mp4, &error) - - guard let videoWriter = videoWriter else { - if let error = error { - reportErrorMessage(error.description) - } - return false - } - - var videoSettings = mediaSettingsAVWrapper.recommendedVideoSettingsForAssetWriter( - withFileType: - AVFileType.mp4, - for: captureVideoOutput - ) - - if mediaSettings.videoBitrate != nil || mediaSettings.framesPerSecond != nil { - var compressionProperties: [String: Any] = [:] - - if let videoBitrate = mediaSettings.videoBitrate { - compressionProperties[AVVideoAverageBitRateKey] = videoBitrate - } - - if let framesPerSecond = mediaSettings.framesPerSecond { - compressionProperties[AVVideoExpectedSourceFrameRateKey] = framesPerSecond - } - - videoSettings?[AVVideoCompressionPropertiesKey] = compressionProperties - } - - let videoWriterInput = mediaSettingsAVWrapper.assetWriterVideoInput( - withOutputSettings: videoSettings) - self.videoWriterInput = videoWriterInput - - let sourcePixelBufferAttributes: [String: Any] = [ - kCVPixelBufferPixelFormatTypeKey as String: videoFormat - ] - - videoAdaptor = inputPixelBufferAdaptorFactory(videoWriterInput, sourcePixelBufferAttributes) - - videoWriterInput.expectsMediaDataInRealTime = true - - // Add the audio input - if mediaSettings.enableAudio { - var acl = AudioChannelLayout() - acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono - - let aclSize = MemoryLayout.size(ofValue: acl) - let aclData = Data(bytes: &acl, count: aclSize) - - var audioSettings: [String: Any] = [ - AVFormatIDKey: kAudioFormatMPEG4AAC, - AVSampleRateKey: 44100.0, - AVNumberOfChannelsKey: 1, - AVChannelLayoutKey: aclData, - ] - - if let audioBitrate = mediaSettings.audioBitrate { - audioSettings[AVEncoderBitRateKey] = audioBitrate - } - - let newAudioWriterInput = mediaSettingsAVWrapper.assetWriterAudioInput( - withOutputSettings: audioSettings) - newAudioWriterInput.expectsMediaDataInRealTime = true - 
mediaSettingsAVWrapper.addInput(newAudioWriterInput, to: videoWriter) - self.audioWriterInput = newAudioWriterInput - } - - if flashMode == .torch { - try? captureDevice.lockForConfiguration() - captureDevice.torchMode = .on - captureDevice.unlockForConfiguration() - } - - mediaSettingsAVWrapper.addInput(videoWriterInput, to: videoWriter) - - captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) - - return true - } - - func pauseVideoRecording() { - isRecordingPaused = true - videoIsDisconnected = true - audioIsDisconnected = true - } - - func resumeVideoRecording() { - isRecordingPaused = false - } - - func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { - if isRecording { - isRecording = false - - // When `isRecording` is true, `startWriting` was already called, so `videoWriter.status` - // is always either `.writing` or `.failed`. `finishWriting` does not throw exceptions, - // so there is no need to check `videoWriter.status` beforehand. - videoWriter?.finishWriting { - if self.videoWriter?.status == .completed { - self.updateOrientation() - completion(self.videoRecordingPath, nil) - self.videoRecordingPath = nil - } else { - completion( - nil, - FlutterError( - code: "IOError", - message: "AVAssetWriter could not finish writing!", - details: nil)) - } - } - } else { - let error = NSError( - domain: NSCocoaErrorDomain, - code: URLError.resourceUnavailable.rawValue, - userInfo: [NSLocalizedDescriptionKey: "Video is not recording!"] - ) - completion(nil, DefaultCamera.flutterErrorFromNSError(error)) - } - } - - func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { - var settings = AVCapturePhotoSettings() - - if mediaSettings.resolutionPreset == .max { - settings.isHighResolutionPhotoEnabled = true - } - - var fileExtension: String - - let isHEVCCodecAvailable = capturePhotoOutput.availablePhotoCodecTypes.contains( - AVVideoCodecType.hevc) - - if fileFormat == .heif, isHEVCCodecAvailable { - settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc]) - fileExtension = "heif" - } else { - fileExtension = "jpg" - } - - if flashMode != .torch { - settings.flashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(flashMode) - } - - let path: String - do { - path = try getTemporaryFilePath( - withExtension: fileExtension, - subfolder: "pictures", - prefix: "CAP_") - } catch let error as NSError { - completion(nil, DefaultCamera.flutterErrorFromNSError(error)) - return - } - - let savePhotoDelegate = FLTSavePhotoDelegate( - path: path, - ioQueue: photoIOQueue, - completionHandler: { [weak self] path, error in - guard let strongSelf = self else { return } - - strongSelf.captureSessionQueue.async { - if let strongSelf = self { - strongSelf.inProgressSavePhotoDelegates.removeValue( - forKey: settings.uniqueID) - } - } - - if let error = error { - completion(nil, DefaultCamera.flutterErrorFromNSError(error as NSError)) - } else { - assert(path != nil, "Path must not be nil if no error.") - completion(path, nil) - } - } - ) - - assert( - DispatchQueue.getSpecific(key: captureSessionQueueSpecificKey) - == captureSessionQueueSpecificValue, - "save photo delegate references must be updated on the capture session queue") - inProgressSavePhotoDelegates[settings.uniqueID] = savePhotoDelegate - capturePhotoOutput.capturePhoto(with: settings, delegate: savePhotoDelegate) - - } - - private func getTemporaryFilePath(withExtension ext: String, subfolder: String, prefix: String) - throws - -> String - { - let docDir 
= FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] - let fileDir = docDir.appendingPathComponent("camera").appendingPathComponent(subfolder) - let fileName = prefix + UUID().uuidString - let file = fileDir.appendingPathComponent(fileName).appendingPathExtension(ext).path - - let fileManager = FileManager.default - if !fileManager.fileExists(atPath: fileDir.path) { - try fileManager.createDirectory( - at: fileDir, - withIntermediateDirectories: true, - attributes: nil) - } - - return file - } - - func setDeviceOrientation(_ orientation: UIDeviceOrientation) { - if deviceOrientation == orientation { - return - } - - deviceOrientation = orientation - updateOrientation() - } - - private func updateOrientation() { - guard !isRecording else { return } - - let orientation: UIDeviceOrientation = - (lockedCaptureOrientation != .unknown) - ? lockedCaptureOrientation - : deviceOrientation - - updateOrientation(orientation, forCaptureOutput: capturePhotoOutput) - updateOrientation(orientation, forCaptureOutput: captureVideoOutput) - } - - private func updateOrientation( - _ orientation: UIDeviceOrientation, forCaptureOutput captureOutput: FLTCaptureOutput - ) { - if let connection = captureOutput.connection(withMediaType: .video), - connection.isVideoOrientationSupported - { - connection.videoOrientation = getVideoOrientation(forDeviceOrientation: orientation) - } - } - - private func getVideoOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation) - -> AVCaptureVideoOrientation - { - switch deviceOrientation { - case .portrait: - return .portrait - case .landscapeLeft: - return .landscapeRight - case .landscapeRight: - return .landscapeLeft - case .portraitUpsideDown: - return .portraitUpsideDown - default: - return .portrait - } - } - - func lockCaptureOrientation(_ pigeonOrientation: FCPPlatformDeviceOrientation) { - let orientation = FCPGetUIDeviceOrientationForPigeonDeviceOrientation(pigeonOrientation) - if lockedCaptureOrientation != orientation { - lockedCaptureOrientation = orientation - updateOrientation() - } - } - - func unlockCaptureOrientation() { - lockedCaptureOrientation = .unknown - updateOrientation() - } - - func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { - self.fileFormat = fileFormat - } - - func setExposureMode(_ mode: FCPPlatformExposureMode) { - exposureMode = mode - applyExposureMode() - } - - private func applyExposureMode() { - try? captureDevice.lockForConfiguration() - switch exposureMode { - case .locked: - // AVCaptureExposureMode.autoExpose automatically adjusts the exposure one time, and then locks exposure for the device - captureDevice.setExposureMode(.autoExpose) - case .auto: - if captureDevice.isExposureModeSupported(.continuousAutoExposure) { - captureDevice.setExposureMode(.continuousAutoExposure) - } else { - captureDevice.setExposureMode(.autoExpose) - } - @unknown default: - assertionFailure("Unknown exposure mode") - } - captureDevice.unlockForConfiguration() - } - - func setExposureOffset(_ offset: Double) { - try? captureDevice.lockForConfiguration() - captureDevice.setExposureTargetBias(Float(offset), completionHandler: nil) - captureDevice.unlockForConfiguration() - } - - func setExposurePoint( - _ point: FCPPlatformPoint?, withCompletion completion: @escaping (FlutterError?) 
-> Void - ) { - guard captureDevice.isExposurePointOfInterestSupported else { - completion( - FlutterError( - code: "setExposurePointFailed", - message: "Device does not have exposure point capabilities", - details: nil)) - return - } - - let orientation = UIDevice.current.orientation - try? captureDevice.lockForConfiguration() - // A nil point resets to the center. - let exposurePoint = cgPoint( - for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation) - captureDevice.setExposurePointOfInterest(exposurePoint) - captureDevice.unlockForConfiguration() - // Retrigger auto exposure - applyExposureMode() - completion(nil) - } - - func setFocusMode(_ mode: FCPPlatformFocusMode) { - focusMode = mode - applyFocusMode() - } - - func setFocusPoint(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) { - guard captureDevice.isFocusPointOfInterestSupported else { - completion( - FlutterError( - code: "setFocusPointFailed", - message: "Device does not have focus point capabilities", - details: nil)) - return - } - - let orientation = deviceOrientationProvider.orientation() - try? captureDevice.lockForConfiguration() - // A nil point resets to the center. - captureDevice.setFocusPointOfInterest( - cgPoint(for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation) - ) - captureDevice.unlockForConfiguration() - // Retrigger auto focus - applyFocusMode() - completion(nil) - } - - private func applyFocusMode() { - applyFocusMode(focusMode, onDevice: captureDevice) - } - - private func applyFocusMode( - _ focusMode: FCPPlatformFocusMode, onDevice captureDevice: FLTCaptureDevice - ) { - try? captureDevice.lockForConfiguration() - switch focusMode { - case .locked: - // AVCaptureFocusMode.autoFocus automatically adjusts the focus one time, and then locks focus - if captureDevice.isFocusModeSupported(.autoFocus) { - captureDevice.setFocusMode(.autoFocus) - } - case .auto: - if captureDevice.isFocusModeSupported(.continuousAutoFocus) { - captureDevice.setFocusMode(.continuousAutoFocus) - } else if captureDevice.isFocusModeSupported(.autoFocus) { - captureDevice.setFocusMode(.autoFocus) - } - @unknown default: - assertionFailure("Unknown focus mode") - } - captureDevice.unlockForConfiguration() - } - - private func cgPoint( - for point: FCPPlatformPoint, withOrientation orientation: UIDeviceOrientation - ) - -> CGPoint - { - var x = point.x - var y = point.y - switch orientation { - case .portrait: // 90 ccw - y = 1 - point.x - x = point.y - case .portraitUpsideDown: // 90 cw - x = 1 - point.y - y = point.x - case .landscapeRight: // 180 - x = 1 - point.x - y = 1 - point.y - case .landscapeLeft: - // No rotation required - break - default: - // No rotation required - break - } - return CGPoint(x: x, y: y) - } - - func setZoomLevel(_ zoom: CGFloat, withCompletion completion: @escaping (FlutterError?) 
-> Void) { - if zoom < captureDevice.minAvailableVideoZoomFactor - || zoom > captureDevice.maxAvailableVideoZoomFactor - { - completion( - FlutterError( - code: "ZOOM_ERROR", - message: - "Zoom level out of bounds (zoom level should be between \(captureDevice.minAvailableVideoZoomFactor) and \(captureDevice.maxAvailableVideoZoomFactor).", - details: nil)) - return - } - - do { - try captureDevice.lockForConfiguration() - } catch let error as NSError { - completion(DefaultCamera.flutterErrorFromNSError(error)) - return - } - - captureDevice.videoZoomFactor = zoom - captureDevice.unlockForConfiguration() - completion(nil) - } - - func setFlashMode( - _ mode: FCPPlatformFlashMode, withCompletion completion: @escaping (FlutterError?) -> Void - ) { - if mode == .torch { - guard captureDevice.hasTorch else { - completion( - FlutterError( - code: "setFlashModeFailed", - message: "Device does not support torch mode", - details: nil) - ) - return - } - guard captureDevice.isTorchAvailable else { - completion( - FlutterError( - code: "setFlashModeFailed", - message: "Torch mode is currently not available", - details: nil)) - return - } - if captureDevice.torchMode != .on { - try? captureDevice.lockForConfiguration() - captureDevice.torchMode = .on - captureDevice.unlockForConfiguration() - } - } else { - guard captureDevice.hasFlash else { - completion( - FlutterError( - code: "setFlashModeFailed", - message: "Device does not have flash capabilities", - details: nil)) - return - } - let avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(mode) - guard capturePhotoOutput.supportedFlashModes.contains(NSNumber(value: avFlashMode.rawValue)) - else { - completion( - FlutterError( - code: "setFlashModeFailed", - message: "Device does not support this specific flash mode", - details: nil)) - return - } - if captureDevice.torchMode != .off { - try? captureDevice.lockForConfiguration() - captureDevice.torchMode = .off - captureDevice.unlockForConfiguration() - } - } - flashMode = mode - completion(nil) - } - - func pausePreview() { - isPreviewPaused = true - } - - func resumePreview() { - isPreviewPaused = false - } - - func setDescriptionWhileRecording( - _ cameraName: String, withCompletion completion: @escaping (FlutterError?) -> Void - ) { - guard isRecording else { - completion( - FlutterError( - code: "setDescriptionWhileRecordingFailed", - message: "Device was not recording", - details: nil)) - return - } - - captureDevice = captureDeviceFactory(cameraName) - - let oldConnection = captureVideoOutput.connection(withMediaType: .video) - - // Stop video capture from the old output. - captureVideoOutput.setSampleBufferDelegate(nil, queue: nil) - - // Remove the old video capture connections. - videoCaptureSession.beginConfiguration() - videoCaptureSession.removeInput(captureVideoInput) - videoCaptureSession.removeOutput(captureVideoOutput.avOutput) - - let newConnection: AVCaptureConnection - - do { - (captureVideoInput, captureVideoOutput, newConnection) = try DefaultCamera.createConnection( - captureDevice: captureDevice, - videoFormat: videoFormat, - captureDeviceInputFactory: captureDeviceInputFactory) - - captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) - } catch { - completion( - FlutterError( - code: "VideoError", - message: "Unable to create video connection", - details: nil)) - return - } - - // Keep the same orientation the old connections had. 
- if let oldConnection = oldConnection, newConnection.isVideoOrientationSupported { - newConnection.videoOrientation = oldConnection.videoOrientation - } - - // Add the new connections to the session. - if !videoCaptureSession.canAddInput(captureVideoInput) { - completion( - FlutterError( - code: "VideoError", - message: "Unable to switch video input", - details: nil)) - } - videoCaptureSession.addInputWithNoConnections(captureVideoInput) - - if !videoCaptureSession.canAddOutput(captureVideoOutput.avOutput) { - completion( - FlutterError( - code: "VideoError", - message: "Unable to switch video output", - details: nil)) - } - videoCaptureSession.addOutputWithNoConnections(captureVideoOutput.avOutput) - - if !videoCaptureSession.canAddConnection(newConnection) { - completion( - FlutterError( - code: "VideoError", - message: "Unable to switch video connection", - details: nil)) - } - videoCaptureSession.addConnection(newConnection) - videoCaptureSession.commitConfiguration() - - completion(nil) - } - - func startImageStream(with messenger: FlutterBinaryMessenger) { - startImageStream( - with: messenger, - imageStreamHandler: FLTImageStreamHandler(captureSessionQueue: captureSessionQueue) - ) - } - - func startImageStream( - with messenger: FlutterBinaryMessenger, - imageStreamHandler: FLTImageStreamHandler - ) { - if isStreamingImages { - reportErrorMessage("Images from camera are already streaming!") - return - } - - let eventChannel = FlutterEventChannel( - name: "plugins.flutter.io/camera_avfoundation/imageStream", - binaryMessenger: messenger - ) - let threadSafeEventChannel = FLTThreadSafeEventChannel(eventChannel: eventChannel) - - self.imageStreamHandler = imageStreamHandler - threadSafeEventChannel.setStreamHandler(imageStreamHandler) { [weak self] in - guard let strongSelf = self else { return } - - strongSelf.captureSessionQueue.async { [weak self] in - guard let strongSelf = self else { return } - - strongSelf.isStreamingImages = true - strongSelf.streamingPendingFramesCount = 0 - } - } - } - - func stopImageStream() { - if isStreamingImages { - isStreamingImages = false - imageStreamHandler = nil - } else { - reportErrorMessage("Images from camera are not streaming!") - } - } - - func captureOutput( - _ output: AVCaptureOutput, - didOutput sampleBuffer: CMSampleBuffer, - from connection: AVCaptureConnection - ) { - if output == captureVideoOutput.avOutput { - if let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) { - - pixelBufferSynchronizationQueue.sync { - latestPixelBuffer = newBuffer - } - - onFrameAvailable?() - } - } - - guard CMSampleBufferDataIsReady(sampleBuffer) else { - reportErrorMessage("sample buffer is not ready. Skipping sample") - return - } - - if isStreamingImages { - if let eventSink = imageStreamHandler?.eventSink, - streamingPendingFramesCount < maxStreamingPendingFramesCount - { - streamingPendingFramesCount += 1 - - let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)! - // Must lock base address before accessing the pixel data - CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) - - let imageWidth = CVPixelBufferGetWidth(pixelBuffer) - let imageHeight = CVPixelBufferGetHeight(pixelBuffer) - - var planes: [[String: Any]] = [] - - let isPlanar = CVPixelBufferIsPlanar(pixelBuffer) - let planeCount = isPlanar ? CVPixelBufferGetPlaneCount(pixelBuffer) : 1 - - for i in 0.. 
0 {
-          currentSampleTime = CMTimeAdd(currentSampleTime, dur)
-        }
-
-        if audioIsDisconnected {
-          audioIsDisconnected = false
-
-          audioTimeOffset =
-            audioTimeOffset.value == 0
-            ? CMTimeSubtract(currentSampleTime, lastAudioSampleTime)
-            : CMTimeAdd(audioTimeOffset, CMTimeSubtract(currentSampleTime, lastAudioSampleTime))
-
-          return
-        }
-
-        lastAudioSampleTime = currentSampleTime
-
-        if audioTimeOffset.value != 0 {
-          if let adjustedSampleBuffer = copySampleBufferWithAdjustedTime(
-            sampleBuffer,
-            by: audioTimeOffset)
-          {
-            newAudioSample(adjustedSampleBuffer)
-          }
-        } else {
-          newAudioSample(sampleBuffer)
-        }
-      }
-    }
-  }
-
-  private func copySampleBufferWithAdjustedTime(_ sample: CMSampleBuffer, by offset: CMTime)
-    -> CMSampleBuffer?
-  {
-    var count: CMItemCount = 0
-    CMSampleBufferGetSampleTimingInfoArray(
-      sample, entryCount: 0, arrayToFill: nil, entriesNeededOut: &count)
-
-    let timingInfo = UnsafeMutablePointer<CMSampleTimingInfo>.allocate(capacity: Int(count))
-    defer { timingInfo.deallocate() }
-
-    CMSampleBufferGetSampleTimingInfoArray(
-      sample, entryCount: count, arrayToFill: timingInfo, entriesNeededOut: &count)
-
-    for i in 0..<count {
-      timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset)
-      timingInfo[i].presentationTimeStamp = CMTimeSubtract(
-        timingInfo[i].presentationTimeStamp, offset)
-    }
-
-    var adjustedSampleBuffer: CMSampleBuffer?
-    CMSampleBufferCreateCopyWithNewTiming(
-      allocator: nil,
-      sampleBuffer: sample,
-      sampleTimingEntryCount: count,
-      sampleTimingArray: timingInfo,
-      sampleBufferOut: &adjustedSampleBuffer)
-
-    return adjustedSampleBuffer
-  }
-
-  func copyPixelBuffer() -> Unmanaged<CVPixelBuffer>? {
-    var pixelBuffer: CVPixelBuffer?
-    pixelBufferSynchronizationQueue.sync {
-      pixelBuffer = latestPixelBuffer
-      latestPixelBuffer = nil
-    }
-
-    if let buffer = pixelBuffer {
-      return Unmanaged.passRetained(buffer)
-    } else {
-      return nil
-    }
-  }
-
-  private func reportErrorMessage(_ errorMessage: String) {
-    FLTEnsureToRunOnMainQueue { [weak self] in
-      self?.dartAPI?.reportError(errorMessage) { _ in
-        // Ignore any errors, as this is just an event broadcast.
-      }
-    }
-  }
-
-  deinit {
-    motionManager.stopAccelerometerUpdates()
-  }
-}
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
new file mode 100644
index 000000000000..ec8d94592358
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
@@ -0,0 +1,1453 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import CoreMotion
+
+// Import Objective-C part of the implementation when SwiftPM is used.
+#if canImport(camera_avfoundation_objc)
+  import camera_avfoundation_objc
+#endif
+
+final class DefaultCamera: NSObject, Camera {
+  var dartAPI: FCPCameraEventApi?
+  var onFrameAvailable: (() -> Void)?
+
+  var videoFormat: FourCharCode = kCVPixelFormatType_32BGRA {
+    didSet {
+      captureVideoOutput.videoSettings = [
+        kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: videoFormat)
+      ]
+    }
+  }
+
+  private(set) var isPreviewPaused = false
+
+  var minimumExposureOffset: CGFloat { CGFloat(captureDevice.minExposureTargetBias) }
+  var maximumExposureOffset: CGFloat { CGFloat(captureDevice.maxExposureTargetBias) }
+  var minimumAvailableZoomFactor: CGFloat { captureDevice.minAvailableVideoZoomFactor }
+  var maximumAvailableZoomFactor: CGFloat { captureDevice.maxAvailableVideoZoomFactor }
+
+  /// The queue on which `latestPixelBuffer` property is accessed.
+  /// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`.
+  private let pixelBufferSynchronizationQueue = DispatchQueue(
+    label: "io.flutter.camera.pixelBufferSynchronizationQueue")
+
+  /// The queue on which captured photos (not videos) are written to disk.
+ /// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation. + private let photoIOQueue = DispatchQueue(label: "io.flutter.camera.photoIOQueue") + + /// All FLTCam's state access and capture session related operations should be run on this queue. + private let captureSessionQueue: DispatchQueue + + private let mediaSettings: FCPPlatformMediaSettings + private let mediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper + + private let videoCaptureSession: FLTCaptureSession + private let audioCaptureSession: FLTCaptureSession + + /// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests. + private let captureDeviceFactory: CaptureDeviceFactory + private let audioCaptureDeviceFactory: AudioCaptureDeviceFactory + private let captureDeviceInputFactory: FLTCaptureDeviceInputFactory + private let assetWriterFactory: AssetWriterFactory + private let inputPixelBufferAdaptorFactory: InputPixelBufferAdaptorFactory + + /// A wrapper for CMVideoFormatDescriptionGetDimensions. + /// Allows for alternate implementations in tests. + private let videoDimensionsForFormat: VideoDimensionsForFormat + + private let deviceOrientationProvider: FLTDeviceOrientationProviding + private let motionManager = CMMotionManager() + + private(set) var captureDevice: FLTCaptureDevice + // Setter exposed for tests. + var captureVideoOutput: FLTCaptureVideoDataOutput + // Setter exposed for tests. + var capturePhotoOutput: FLTCapturePhotoOutput + private var captureVideoInput: FLTCaptureInput + + private var videoWriter: FLTAssetWriter? + private var videoWriterInput: FLTAssetWriterInput? + private var audioWriterInput: FLTAssetWriterInput? + private var assetWriterPixelBufferAdaptor: FLTAssetWriterInputPixelBufferAdaptor? + private var videoAdaptor: FLTAssetWriterInputPixelBufferAdaptor? + + /// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the + /// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the + /// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo + /// capture operations may overlap, so FLTCam has to keep track of multiple delegates in progress, + /// instead of just a single delegate reference. + private(set) var inProgressSavePhotoDelegates = [Int64: FLTSavePhotoDelegate]() + + private var imageStreamHandler: FLTImageStreamHandler? + + private var textureId: Int64? + private var previewSize: CGSize? + private var deviceOrientation = UIDeviceOrientation.unknown + + /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. + /// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API. + private var latestPixelBuffer: CVPixelBuffer? + + private var videoRecordingPath: String? + private var isRecording = false + private var isRecordingPaused = false + private var isFirstVideoSample = false + private var videoIsDisconnected = false + private var audioIsDisconnected = false + private var isAudioSetup = false + private var lastVideoSampleTime = CMTime.zero + private var lastAudioSampleTime = CMTime.zero + private var videoTimeOffset = CMTime.zero + private var audioTimeOffset = CMTime.zero + + /// True when images from the camera are being streamed. + private(set) var isStreamingImages = false + + /// Number of frames currently pending processing. + private var streamingPendingFramesCount = 0 + + /// Maximum number of frames pending processing. 
+  /// To limit memory consumption, limit the number of frames pending processing.
+  /// After some testing, 4 was determined to be the best maximum value.
+  /// https://github.com/flutter/plugins/pull/4520#discussion_r766335637
+  private var maxStreamingPendingFramesCount = 4
+
+  private var fileFormat = FCPPlatformImageFileFormat.jpeg
+  private var lockedCaptureOrientation = UIDeviceOrientation.unknown
+  private var exposureMode = FCPPlatformExposureMode.auto
+  private var focusMode = FCPPlatformFocusMode.auto
+  private var flashMode: FCPPlatformFlashMode
+
+  private static func flutterErrorFromNSError(_ error: NSError) -> FlutterError {
+    return FlutterError(
+      code: "Error \(error.code)",
+      message: error.localizedDescription,
+      details: error.domain)
+  }
+
+  // Returns frame rate supported by format closest to targetFrameRate.
+  private static func bestFrameRate(for format: FLTCaptureDeviceFormat, targetFrameRate: Double)
+    -> Double
+  {
+    var bestFrameRate = 0.0
+    var minDistance = Double.greatestFiniteMagnitude
+    for range in format.videoSupportedFrameRateRanges {
+      let frameRate = min(
+        max(targetFrameRate, Double(range.minFrameRate)), Double(range.maxFrameRate))
+      let distance = abs(frameRate - targetFrameRate)
+      if distance < minDistance {
+        bestFrameRate = frameRate
+        minDistance = distance
+      }
+    }
+    return bestFrameRate
+  }
+
+  // Finds format with same resolution as current activeFormat in captureDevice for which
+  // bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond.
+  // Preferred are formats with the same subtype as current activeFormat. Sets this format
+  // as activeFormat and also updates mediaSettings.framesPerSecond to value which
+  // bestFrameRateForFormat returned for that format.
+  private static func selectBestFormatForRequestedFrameRate(
+    captureDevice: FLTCaptureDevice,
+    mediaSettings: FCPPlatformMediaSettings,
+    targetFrameRate: Double,
+    videoDimensionsForFormat: (FLTCaptureDeviceFormat) -> CMVideoDimensions
+  ) {
+    let targetResolution = videoDimensionsForFormat(captureDevice.activeFormat)
+    let preferredSubType = CMFormatDescriptionGetMediaSubType(
+      captureDevice.activeFormat.formatDescription)
+    var bestFormat = captureDevice.activeFormat
+    var _bestFrameRate = bestFrameRate(for: bestFormat, targetFrameRate: targetFrameRate)
+    var minDistance = abs(_bestFrameRate - targetFrameRate)
+    var isBestSubTypePreferred = true
+
+    for format in captureDevice.formats {
+      let resolution = videoDimensionsForFormat(format)
+      if resolution.width != targetResolution.width || resolution.height != targetResolution.height
+      {
+        continue
+      }
+      let frameRate = bestFrameRate(for: format, targetFrameRate: targetFrameRate)
+      let distance = abs(frameRate - targetFrameRate)
+      let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription)
+      let isSubTypePreferred = subType == preferredSubType
+      if distance < minDistance
+        || (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred)
+      {
+        bestFormat = format
+        _bestFrameRate = frameRate
+        minDistance = distance
+        isBestSubTypePreferred = isSubTypePreferred
+      }
+    }
+    captureDevice.activeFormat = bestFormat
+    mediaSettings.framesPerSecond = NSNumber(value: _bestFrameRate)
+  }
+
+  private static func createConnection(
+    captureDevice: FLTCaptureDevice,
+    videoFormat: FourCharCode,
+    captureDeviceInputFactory: FLTCaptureDeviceInputFactory
+  ) throws -> (FLTCaptureInput, FLTCaptureVideoDataOutput, AVCaptureConnection) {
+    // Setup video capture input.
+ let captureVideoInput = try captureDeviceInputFactory.deviceInput(with: captureDevice) + + // Setup video capture output. + let captureVideoOutput = FLTDefaultCaptureVideoDataOutput( + captureVideoOutput: AVCaptureVideoDataOutput()) + captureVideoOutput.videoSettings = [ + kCVPixelBufferPixelFormatTypeKey as String: videoFormat as Any + ] + captureVideoOutput.alwaysDiscardsLateVideoFrames = true + + // Setup video capture connection. + let connection = AVCaptureConnection( + inputPorts: captureVideoInput.ports, + output: captureVideoOutput.avOutput) + + if captureDevice.position == .front { + connection.isVideoMirrored = true + } + + return (captureVideoInput, captureVideoOutput, connection) + } + + init(configuration: FLTCamConfiguration) throws { + captureSessionQueue = configuration.captureSessionQueue + mediaSettings = configuration.mediaSettings + mediaSettingsAVWrapper = configuration.mediaSettingsWrapper + videoCaptureSession = configuration.videoCaptureSession + audioCaptureSession = configuration.audioCaptureSession + captureDeviceFactory = configuration.captureDeviceFactory + audioCaptureDeviceFactory = configuration.audioCaptureDeviceFactory + captureDeviceInputFactory = configuration.captureDeviceInputFactory + assetWriterFactory = configuration.assetWriterFactory + inputPixelBufferAdaptorFactory = configuration.inputPixelBufferAdaptorFactory + videoDimensionsForFormat = configuration.videoDimensionsForFormat + deviceOrientationProvider = configuration.deviceOrientationProvider + + captureDevice = captureDeviceFactory(configuration.initialCameraName) + flashMode = captureDevice.hasFlash ? .auto : .off + + capturePhotoOutput = FLTDefaultCapturePhotoOutput(photoOutput: AVCapturePhotoOutput()) + capturePhotoOutput.highResolutionCaptureEnabled = true + + videoCaptureSession.automaticallyConfiguresApplicationAudioSession = false + audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false + + deviceOrientation = configuration.orientation + + let connection: AVCaptureConnection + (captureVideoInput, captureVideoOutput, connection) = try DefaultCamera.createConnection( + captureDevice: captureDevice, + videoFormat: videoFormat, + captureDeviceInputFactory: configuration.captureDeviceInputFactory) + + super.init() + + captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) + + videoCaptureSession.addInputWithNoConnections(captureVideoInput) + videoCaptureSession.addOutputWithNoConnections(captureVideoOutput.avOutput) + videoCaptureSession.addConnection(connection) + + videoCaptureSession.addOutput(capturePhotoOutput.avOutput) + + motionManager.startAccelerometerUpdates() + + if let targetFrameRate = mediaSettings.framesPerSecond { + // The frame rate can be changed only on a locked for configuration device. + try mediaSettingsAVWrapper.lockDevice(captureDevice) + mediaSettingsAVWrapper.beginConfiguration(for: videoCaptureSession) + + // Possible values for presets are hard-coded in FLT interface having + // corresponding AVCaptureSessionPreset counterparts. + // If _resolutionPreset is not supported by camera there is + // fallback to lower resolution presets. + // If none can be selected there is error condition. 
+ do { + try setCaptureSessionPreset(mediaSettings.resolutionPreset) + } catch { + videoCaptureSession.commitConfiguration() + captureDevice.unlockForConfiguration() + throw error + } + + DefaultCamera.selectBestFormatForRequestedFrameRate( + captureDevice: captureDevice, + mediaSettings: mediaSettings, + targetFrameRate: targetFrameRate.doubleValue, + videoDimensionsForFormat: videoDimensionsForFormat + ) + + if let framesPerSecond = mediaSettings.framesPerSecond { + // Set frame rate with 1/10 precision allowing non-integral values. + let fpsNominator = floor(framesPerSecond.doubleValue * 10.0) + let duration = CMTimeMake(value: 10, timescale: Int32(fpsNominator)) + + mediaSettingsAVWrapper.setMinFrameDuration(duration, on: captureDevice) + mediaSettingsAVWrapper.setMaxFrameDuration(duration, on: captureDevice) + } + + mediaSettingsAVWrapper.commitConfiguration(for: videoCaptureSession) + mediaSettingsAVWrapper.unlockDevice(captureDevice) + } else { + // If the frame rate is not important fall to a less restrictive + // behavior (no configuration locking). + try setCaptureSessionPreset(mediaSettings.resolutionPreset) + } + + updateOrientation() + } + + private func setCaptureSessionPreset( + _ resolutionPreset: FCPPlatformResolutionPreset + ) throws { + switch resolutionPreset { + case .max: + if let bestFormat = highestResolutionFormat(forCaptureDevice: captureDevice) { + videoCaptureSession.sessionPreset = .inputPriority + if (try? captureDevice.lockForConfiguration()) != nil { + // Set the best device format found and finish the device configuration. + captureDevice.activeFormat = bestFormat + captureDevice.unlockForConfiguration() + break + } + } + fallthrough + case .ultraHigh: + if videoCaptureSession.canSetSessionPreset(.hd4K3840x2160) { + videoCaptureSession.sessionPreset = .hd4K3840x2160 + break + } + if videoCaptureSession.canSetSessionPreset(.high) { + videoCaptureSession.sessionPreset = .high + break + } + fallthrough + case .veryHigh: + if videoCaptureSession.canSetSessionPreset(.hd1920x1080) { + videoCaptureSession.sessionPreset = .hd1920x1080 + break + } + fallthrough + case .high: + if videoCaptureSession.canSetSessionPreset(.hd1280x720) { + videoCaptureSession.sessionPreset = .hd1280x720 + break + } + fallthrough + case .medium: + if videoCaptureSession.canSetSessionPreset(.vga640x480) { + videoCaptureSession.sessionPreset = .vga640x480 + break + } + fallthrough + case .low: + if videoCaptureSession.canSetSessionPreset(.cif352x288) { + videoCaptureSession.sessionPreset = .cif352x288 + break + } + fallthrough + default: + if videoCaptureSession.canSetSessionPreset(.low) { + videoCaptureSession.sessionPreset = .low + } else { + throw NSError( + domain: NSCocoaErrorDomain, + code: URLError.unknown.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "No capture session available for current capture session." + ]) + } + } + + let size = videoDimensionsForFormat(captureDevice.activeFormat) + previewSize = CGSize(width: CGFloat(size.width), height: CGFloat(size.height)) + audioCaptureSession.sessionPreset = videoCaptureSession.sessionPreset + } + + /// Finds the highest available resolution in terms of pixel count for the given device. + /// Preferred are formats with the same subtype as current activeFormat. + private func highestResolutionFormat(forCaptureDevice captureDevice: FLTCaptureDevice) + -> FLTCaptureDeviceFormat? + { + let preferredSubType = CMFormatDescriptionGetMediaSubType( + captureDevice.activeFormat.formatDescription) + var bestFormat: FLTCaptureDeviceFormat? 
= nil + var maxPixelCount: UInt = 0 + var isBestSubTypePreferred = false + + for format in captureDevice.formats { + let resolution = videoDimensionsForFormat(format) + let height = UInt(resolution.height) + let width = UInt(resolution.width) + let pixelCount = height * width + let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription) + let isSubTypePreferred = subType == preferredSubType + + if pixelCount > maxPixelCount + || (pixelCount == maxPixelCount && isSubTypePreferred && !isBestSubTypePreferred) + { + bestFormat = format + maxPixelCount = pixelCount + isBestSubTypePreferred = isSubTypePreferred + } + } + + return bestFormat + } + + func setUpCaptureSessionForAudioIfNeeded() { + // Don't setup audio twice or we will lose the audio. + guard !mediaSettings.enableAudio || !isAudioSetup else { return } + + let audioDevice = audioCaptureDeviceFactory() + do { + // Create a device input with the device and add it to the session. + // Setup the audio input. + let audioInput = try captureDeviceInputFactory.deviceInput(with: audioDevice) + + // Setup the audio output. + let audioOutput = AVCaptureAudioDataOutput() + + let block = { + // Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other + // plugins like video_player. + DefaultCamera.upgradeAudioSessionCategory( + requestedCategory: .playAndRecord, + options: [.defaultToSpeaker, .allowBluetoothA2DP, .allowAirPlay] + ) + } + + if !Thread.isMainThread { + DispatchQueue.main.sync(execute: block) + } else { + block() + } + + if audioCaptureSession.canAddInput(audioInput) { + audioCaptureSession.addInput(audioInput) + + if audioCaptureSession.canAddOutput(audioOutput) { + audioCaptureSession.addOutput(audioOutput) + audioOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) + isAudioSetup = true + } else { + reportErrorMessage("Unable to add Audio input/output to session capture") + isAudioSetup = false + } + } + } catch let error as NSError { + reportErrorMessage(error.description) + } + } + + // This function, although slightly modified, is also in video_player_avfoundation. + // Both need to do the same thing and run on the same thread (for example main thread). + // Configure application wide audio session manually to prevent overwriting flag + // MixWithOthers by capture session. + // Only change category if it is considered an upgrade which means it can only enable + // ability to play in silent mode or ability to record audio but never disables it, + // that could affect other plugins which depend on this global state. Only change + // category or options if there is change to prevent unnecessary lags and silence. 
+  private static func upgradeAudioSessionCategory(
+    requestedCategory: AVAudioSession.Category,
+    options: AVAudioSession.CategoryOptions
+  ) {
+    let playCategories: Set<AVAudioSession.Category> = [.playback, .playAndRecord]
+    let recordCategories: Set<AVAudioSession.Category> = [.record, .playAndRecord]
+    let requiredCategories: Set<AVAudioSession.Category> = [
+      requestedCategory, AVAudioSession.sharedInstance().category,
+    ]
+
+    let requiresPlay = !requiredCategories.isDisjoint(with: playCategories)
+    let requiresRecord = !requiredCategories.isDisjoint(with: recordCategories)
+
+    var finalCategory = requestedCategory
+    if requiresPlay && requiresRecord {
+      finalCategory = .playAndRecord
+    } else if requiresPlay {
+      finalCategory = .playback
+    } else if requiresRecord {
+      finalCategory = .record
+    }
+
+    let finalOptions = AVAudioSession.sharedInstance().categoryOptions.union(options)
+
+    if finalCategory == AVAudioSession.sharedInstance().category
+      && finalOptions == AVAudioSession.sharedInstance().categoryOptions
+    {
+      return
+    }
+
+    try? AVAudioSession.sharedInstance().setCategory(finalCategory, options: finalOptions)
+  }
+
+  func reportInitializationState() {
+    // Get all the state on the current thread, not the main thread.
+    let state = FCPPlatformCameraState.make(
+      withPreviewSize: FCPPlatformSize.make(
+        withWidth: Double(previewSize!.width),
+        height: Double(previewSize!.height)
+      ),
+      exposureMode: exposureMode,
+      focusMode: focusMode,
+      exposurePointSupported: captureDevice.isExposurePointOfInterestSupported,
+      focusPointSupported: captureDevice.isFocusPointOfInterestSupported
+    )
+
+    FLTEnsureToRunOnMainQueue { [weak self] in
+      self?.dartAPI?.initialized(with: state) { _ in
+        // Ignore any errors, as this is just an event broadcast.
+      }
+    }
+  }
+
+  func receivedImageStreamData() {
+    streamingPendingFramesCount -= 1
+  }
+
+  func start() {
+    videoCaptureSession.startRunning()
+    audioCaptureSession.startRunning()
+  }
+
+  func stop() {
+    videoCaptureSession.stopRunning()
+    audioCaptureSession.stopRunning()
+  }
+
+  func startVideoRecording(
+    completion: @escaping (FlutterError?) -> Void,
+    messengerForStreaming messenger: FlutterBinaryMessenger?
+ ) { + guard !isRecording else { + completion( + FlutterError( + code: "Error", + message: "Video is already recording", + details: nil)) + return + } + + if let messenger = messenger { + startImageStream(with: messenger) + } + + let videoRecordingPath: String + do { + videoRecordingPath = try getTemporaryFilePath( + withExtension: "mp4", + subfolder: "videos", + prefix: "REC_") + self.videoRecordingPath = videoRecordingPath + } catch let error as NSError { + completion(DefaultCamera.flutterErrorFromNSError(error)) + return + } + + guard setupWriter(forPath: videoRecordingPath) else { + completion( + FlutterError( + code: "IOError", + message: "Setup Writer Failed", + details: nil)) + return + } + + // startWriting should not be called in didOutputSampleBuffer where it can cause state + // in which _isRecording is YES but _videoWriter.status is AVAssetWriterStatusUnknown + // in stopVideoRecording if it is called after startVideoRecording but before + // didOutputSampleBuffer had chance to call startWriting and lag at start of video + // https://github.com/flutter/flutter/issues/132016 + // https://github.com/flutter/flutter/issues/151319 + videoWriter?.startWriting() + isFirstVideoSample = true + isRecording = true + isRecordingPaused = false + videoTimeOffset = CMTimeMake(value: 0, timescale: 1) + audioTimeOffset = CMTimeMake(value: 0, timescale: 1) + videoIsDisconnected = false + audioIsDisconnected = false + completion(nil) + } + + private func setupWriter(forPath path: String) -> Bool { + setUpCaptureSessionForAudioIfNeeded() + + var error: NSError? + videoWriter = assetWriterFactory(URL(fileURLWithPath: path), AVFileType.mp4, &error) + + guard let videoWriter = videoWriter else { + if let error = error { + reportErrorMessage(error.description) + } + return false + } + + var videoSettings = mediaSettingsAVWrapper.recommendedVideoSettingsForAssetWriter( + withFileType: + AVFileType.mp4, + for: captureVideoOutput + ) + + if mediaSettings.videoBitrate != nil || mediaSettings.framesPerSecond != nil { + var compressionProperties: [String: Any] = [:] + + if let videoBitrate = mediaSettings.videoBitrate { + compressionProperties[AVVideoAverageBitRateKey] = videoBitrate + } + + if let framesPerSecond = mediaSettings.framesPerSecond { + compressionProperties[AVVideoExpectedSourceFrameRateKey] = framesPerSecond + } + + videoSettings?[AVVideoCompressionPropertiesKey] = compressionProperties + } + + let videoWriterInput = mediaSettingsAVWrapper.assetWriterVideoInput( + withOutputSettings: videoSettings) + self.videoWriterInput = videoWriterInput + + let sourcePixelBufferAttributes: [String: Any] = [ + kCVPixelBufferPixelFormatTypeKey as String: videoFormat + ] + + videoAdaptor = inputPixelBufferAdaptorFactory(videoWriterInput, sourcePixelBufferAttributes) + + videoWriterInput.expectsMediaDataInRealTime = true + + // Add the audio input + if mediaSettings.enableAudio { + var acl = AudioChannelLayout() + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono + + let aclSize = MemoryLayout.size(ofValue: acl) + let aclData = Data(bytes: &acl, count: aclSize) + + var audioSettings: [String: Any] = [ + AVFormatIDKey: kAudioFormatMPEG4AAC, + AVSampleRateKey: 44100.0, + AVNumberOfChannelsKey: 1, + AVChannelLayoutKey: aclData, + ] + + if let audioBitrate = mediaSettings.audioBitrate { + audioSettings[AVEncoderBitRateKey] = audioBitrate + } + + let newAudioWriterInput = mediaSettingsAVWrapper.assetWriterAudioInput( + withOutputSettings: audioSettings) + newAudioWriterInput.expectsMediaDataInRealTime = true + 
mediaSettingsAVWrapper.addInput(newAudioWriterInput, to: videoWriter) + self.audioWriterInput = newAudioWriterInput + } + + if flashMode == .torch { + try? captureDevice.lockForConfiguration() + captureDevice.torchMode = .on + captureDevice.unlockForConfiguration() + } + + mediaSettingsAVWrapper.addInput(videoWriterInput, to: videoWriter) + + captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) + + return true + } + + func pauseVideoRecording() { + isRecordingPaused = true + videoIsDisconnected = true + audioIsDisconnected = true + } + + func resumeVideoRecording() { + isRecordingPaused = false + } + + func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { + if isRecording { + isRecording = false + + // When `isRecording` is true, `startWriting` was already called, so `videoWriter.status` + // is always either `.writing` or `.failed`. `finishWriting` does not throw exceptions, + // so there is no need to check `videoWriter.status` beforehand. + videoWriter?.finishWriting { + if self.videoWriter?.status == .completed { + self.updateOrientation() + completion(self.videoRecordingPath, nil) + self.videoRecordingPath = nil + } else { + completion( + nil, + FlutterError( + code: "IOError", + message: "AVAssetWriter could not finish writing!", + details: nil)) + } + } + } else { + let error = NSError( + domain: NSCocoaErrorDomain, + code: URLError.resourceUnavailable.rawValue, + userInfo: [NSLocalizedDescriptionKey: "Video is not recording!"] + ) + completion(nil, DefaultCamera.flutterErrorFromNSError(error)) + } + } + + func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { + var settings = AVCapturePhotoSettings() + + if mediaSettings.resolutionPreset == .max { + settings.isHighResolutionPhotoEnabled = true + } + + var fileExtension: String + + let isHEVCCodecAvailable = capturePhotoOutput.availablePhotoCodecTypes.contains( + AVVideoCodecType.hevc) + + if fileFormat == .heif, isHEVCCodecAvailable { + settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc]) + fileExtension = "heif" + } else { + fileExtension = "jpg" + } + + if flashMode != .torch { + settings.flashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(flashMode) + } + + let path: String + do { + path = try getTemporaryFilePath( + withExtension: fileExtension, + subfolder: "pictures", + prefix: "CAP_") + } catch let error as NSError { + completion(nil, DefaultCamera.flutterErrorFromNSError(error)) + return + } + + let savePhotoDelegate = FLTSavePhotoDelegate( + path: path, + ioQueue: photoIOQueue, + completionHandler: { [weak self] path, error in + guard let strongSelf = self else { return } + + strongSelf.captureSessionQueue.async { + if let strongSelf = self { + strongSelf.inProgressSavePhotoDelegates.removeValue( + forKey: settings.uniqueID) + } + } + + if let error = error { + completion(nil, DefaultCamera.flutterErrorFromNSError(error as NSError)) + } else { + assert(path != nil, "Path must not be nil if no error.") + completion(path, nil) + } + } + ) + + assert( + DispatchQueue.getSpecific(key: captureSessionQueueSpecificKey) + == captureSessionQueueSpecificValue, + "save photo delegate references must be updated on the capture session queue") + inProgressSavePhotoDelegates[settings.uniqueID] = savePhotoDelegate + capturePhotoOutput.capturePhoto(with: settings, delegate: savePhotoDelegate) + + } + + private func getTemporaryFilePath(withExtension ext: String, subfolder: String, prefix: String) + throws + -> String + { + let docDir 
= FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] + let fileDir = docDir.appendingPathComponent("camera").appendingPathComponent(subfolder) + let fileName = prefix + UUID().uuidString + let file = fileDir.appendingPathComponent(fileName).appendingPathExtension(ext).path + + let fileManager = FileManager.default + if !fileManager.fileExists(atPath: fileDir.path) { + try fileManager.createDirectory( + at: fileDir, + withIntermediateDirectories: true, + attributes: nil) + } + + return file + } + + func setDeviceOrientation(_ orientation: UIDeviceOrientation) { + if deviceOrientation == orientation { + return + } + + deviceOrientation = orientation + updateOrientation() + } + + private func updateOrientation() { + guard !isRecording else { return } + + let orientation: UIDeviceOrientation = + (lockedCaptureOrientation != .unknown) + ? lockedCaptureOrientation + : deviceOrientation + + updateOrientation(orientation, forCaptureOutput: capturePhotoOutput) + updateOrientation(orientation, forCaptureOutput: captureVideoOutput) + } + + private func updateOrientation( + _ orientation: UIDeviceOrientation, forCaptureOutput captureOutput: FLTCaptureOutput + ) { + if let connection = captureOutput.connection(withMediaType: .video), + connection.isVideoOrientationSupported + { + connection.videoOrientation = getVideoOrientation(forDeviceOrientation: orientation) + } + } + + private func getVideoOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation) + -> AVCaptureVideoOrientation + { + switch deviceOrientation { + case .portrait: + return .portrait + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + case .portraitUpsideDown: + return .portraitUpsideDown + default: + return .portrait + } + } + + func lockCaptureOrientation(_ pigeonOrientation: FCPPlatformDeviceOrientation) { + let orientation = FCPGetUIDeviceOrientationForPigeonDeviceOrientation(pigeonOrientation) + if lockedCaptureOrientation != orientation { + lockedCaptureOrientation = orientation + updateOrientation() + } + } + + func unlockCaptureOrientation() { + lockedCaptureOrientation = .unknown + updateOrientation() + } + + func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { + self.fileFormat = fileFormat + } + + func setExposureMode(_ mode: FCPPlatformExposureMode) { + exposureMode = mode + applyExposureMode() + } + + private func applyExposureMode() { + try? captureDevice.lockForConfiguration() + switch exposureMode { + case .locked: + // AVCaptureExposureMode.autoExpose automatically adjusts the exposure one time, and then locks exposure for the device + captureDevice.setExposureMode(.autoExpose) + case .auto: + if captureDevice.isExposureModeSupported(.continuousAutoExposure) { + captureDevice.setExposureMode(.continuousAutoExposure) + } else { + captureDevice.setExposureMode(.autoExpose) + } + @unknown default: + assertionFailure("Unknown exposure mode") + } + captureDevice.unlockForConfiguration() + } + + func setExposureOffset(_ offset: Double) { + try? captureDevice.lockForConfiguration() + captureDevice.setExposureTargetBias(Float(offset), completionHandler: nil) + captureDevice.unlockForConfiguration() + } + + func setExposurePoint( + _ point: FCPPlatformPoint?, withCompletion completion: @escaping (FlutterError?) 
-> Void + ) { + guard captureDevice.isExposurePointOfInterestSupported else { + completion( + FlutterError( + code: "setExposurePointFailed", + message: "Device does not have exposure point capabilities", + details: nil)) + return + } + + let orientation = UIDevice.current.orientation + try? captureDevice.lockForConfiguration() + // A nil point resets to the center. + let exposurePoint = cgPoint( + for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation) + captureDevice.setExposurePointOfInterest(exposurePoint) + captureDevice.unlockForConfiguration() + // Retrigger auto exposure + applyExposureMode() + completion(nil) + } + + func setFocusMode(_ mode: FCPPlatformFocusMode) { + focusMode = mode + applyFocusMode() + } + + func setFocusPoint(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) { + guard captureDevice.isFocusPointOfInterestSupported else { + completion( + FlutterError( + code: "setFocusPointFailed", + message: "Device does not have focus point capabilities", + details: nil)) + return + } + + let orientation = deviceOrientationProvider.orientation() + try? captureDevice.lockForConfiguration() + // A nil point resets to the center. + captureDevice.setFocusPointOfInterest( + cgPoint(for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation) + ) + captureDevice.unlockForConfiguration() + // Retrigger auto focus + applyFocusMode() + completion(nil) + } + + private func applyFocusMode() { + applyFocusMode(focusMode, onDevice: captureDevice) + } + + private func applyFocusMode( + _ focusMode: FCPPlatformFocusMode, onDevice captureDevice: FLTCaptureDevice + ) { + try? captureDevice.lockForConfiguration() + switch focusMode { + case .locked: + // AVCaptureFocusMode.autoFocus automatically adjusts the focus one time, and then locks focus + if captureDevice.isFocusModeSupported(.autoFocus) { + captureDevice.setFocusMode(.autoFocus) + } + case .auto: + if captureDevice.isFocusModeSupported(.continuousAutoFocus) { + captureDevice.setFocusMode(.continuousAutoFocus) + } else if captureDevice.isFocusModeSupported(.autoFocus) { + captureDevice.setFocusMode(.autoFocus) + } + @unknown default: + assertionFailure("Unknown focus mode") + } + captureDevice.unlockForConfiguration() + } + + private func cgPoint( + for point: FCPPlatformPoint, withOrientation orientation: UIDeviceOrientation + ) + -> CGPoint + { + var x = point.x + var y = point.y + switch orientation { + case .portrait: // 90 ccw + y = 1 - point.x + x = point.y + case .portraitUpsideDown: // 90 cw + x = 1 - point.y + y = point.x + case .landscapeRight: // 180 + x = 1 - point.x + y = 1 - point.y + case .landscapeLeft: + // No rotation required + break + default: + // No rotation required + break + } + return CGPoint(x: x, y: y) + } + + func setZoomLevel(_ zoom: CGFloat, withCompletion completion: @escaping (FlutterError?) 
-> Void) { + if zoom < captureDevice.minAvailableVideoZoomFactor + || zoom > captureDevice.maxAvailableVideoZoomFactor + { + completion( + FlutterError( + code: "ZOOM_ERROR", + message: + "Zoom level out of bounds (zoom level should be between \(captureDevice.minAvailableVideoZoomFactor) and \(captureDevice.maxAvailableVideoZoomFactor).", + details: nil)) + return + } + + do { + try captureDevice.lockForConfiguration() + } catch let error as NSError { + completion(DefaultCamera.flutterErrorFromNSError(error)) + return + } + + captureDevice.videoZoomFactor = zoom + captureDevice.unlockForConfiguration() + completion(nil) + } + + func setFlashMode( + _ mode: FCPPlatformFlashMode, withCompletion completion: @escaping (FlutterError?) -> Void + ) { + if mode == .torch { + guard captureDevice.hasTorch else { + completion( + FlutterError( + code: "setFlashModeFailed", + message: "Device does not support torch mode", + details: nil) + ) + return + } + guard captureDevice.isTorchAvailable else { + completion( + FlutterError( + code: "setFlashModeFailed", + message: "Torch mode is currently not available", + details: nil)) + return + } + if captureDevice.torchMode != .on { + try? captureDevice.lockForConfiguration() + captureDevice.torchMode = .on + captureDevice.unlockForConfiguration() + } + } else { + guard captureDevice.hasFlash else { + completion( + FlutterError( + code: "setFlashModeFailed", + message: "Device does not have flash capabilities", + details: nil)) + return + } + let avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(mode) + guard capturePhotoOutput.supportedFlashModes.contains(NSNumber(value: avFlashMode.rawValue)) + else { + completion( + FlutterError( + code: "setFlashModeFailed", + message: "Device does not support this specific flash mode", + details: nil)) + return + } + if captureDevice.torchMode != .off { + try? captureDevice.lockForConfiguration() + captureDevice.torchMode = .off + captureDevice.unlockForConfiguration() + } + } + flashMode = mode + completion(nil) + } + + func pausePreview() { + isPreviewPaused = true + } + + func resumePreview() { + isPreviewPaused = false + } + + func setDescriptionWhileRecording( + _ cameraName: String, withCompletion completion: @escaping (FlutterError?) -> Void + ) { + guard isRecording else { + completion( + FlutterError( + code: "setDescriptionWhileRecordingFailed", + message: "Device was not recording", + details: nil)) + return + } + + captureDevice = captureDeviceFactory(cameraName) + + let oldConnection = captureVideoOutput.connection(withMediaType: .video) + + // Stop video capture from the old output. + captureVideoOutput.setSampleBufferDelegate(nil, queue: nil) + + // Remove the old video capture connections. + videoCaptureSession.beginConfiguration() + videoCaptureSession.removeInput(captureVideoInput) + videoCaptureSession.removeOutput(captureVideoOutput.avOutput) + + let newConnection: AVCaptureConnection + + do { + (captureVideoInput, captureVideoOutput, newConnection) = try DefaultCamera.createConnection( + captureDevice: captureDevice, + videoFormat: videoFormat, + captureDeviceInputFactory: captureDeviceInputFactory) + + captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) + } catch { + completion( + FlutterError( + code: "VideoError", + message: "Unable to create video connection", + details: nil)) + return + } + + // Keep the same orientation the old connections had. 
+ if let oldConnection = oldConnection, newConnection.isVideoOrientationSupported { + newConnection.videoOrientation = oldConnection.videoOrientation + } + + // Add the new connections to the session. + if !videoCaptureSession.canAddInput(captureVideoInput) { + completion( + FlutterError( + code: "VideoError", + message: "Unable to switch video input", + details: nil)) + } + videoCaptureSession.addInputWithNoConnections(captureVideoInput) + + if !videoCaptureSession.canAddOutput(captureVideoOutput.avOutput) { + completion( + FlutterError( + code: "VideoError", + message: "Unable to switch video output", + details: nil)) + } + videoCaptureSession.addOutputWithNoConnections(captureVideoOutput.avOutput) + + if !videoCaptureSession.canAddConnection(newConnection) { + completion( + FlutterError( + code: "VideoError", + message: "Unable to switch video connection", + details: nil)) + } + videoCaptureSession.addConnection(newConnection) + videoCaptureSession.commitConfiguration() + + completion(nil) + } + + func startImageStream(with messenger: FlutterBinaryMessenger) { + startImageStream( + with: messenger, + imageStreamHandler: FLTImageStreamHandler(captureSessionQueue: captureSessionQueue) + ) + } + + func startImageStream( + with messenger: FlutterBinaryMessenger, + imageStreamHandler: FLTImageStreamHandler + ) { + if isStreamingImages { + reportErrorMessage("Images from camera are already streaming!") + return + } + + let eventChannel = FlutterEventChannel( + name: "plugins.flutter.io/camera_avfoundation/imageStream", + binaryMessenger: messenger + ) + let threadSafeEventChannel = FLTThreadSafeEventChannel(eventChannel: eventChannel) + + self.imageStreamHandler = imageStreamHandler + threadSafeEventChannel.setStreamHandler(imageStreamHandler) { [weak self] in + guard let strongSelf = self else { return } + + strongSelf.captureSessionQueue.async { [weak self] in + guard let strongSelf = self else { return } + + strongSelf.isStreamingImages = true + strongSelf.streamingPendingFramesCount = 0 + } + } + } + + func stopImageStream() { + if isStreamingImages { + isStreamingImages = false + imageStreamHandler = nil + } else { + reportErrorMessage("Images from camera are not streaming!") + } + } + + func captureOutput( + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, + from connection: AVCaptureConnection + ) { + if output == captureVideoOutput.avOutput { + if let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) { + + pixelBufferSynchronizationQueue.sync { + latestPixelBuffer = newBuffer + } + + onFrameAvailable?() + } + } + + guard CMSampleBufferDataIsReady(sampleBuffer) else { + reportErrorMessage("sample buffer is not ready. Skipping sample") + return + } + + if isStreamingImages { + if let eventSink = imageStreamHandler?.eventSink, + streamingPendingFramesCount < maxStreamingPendingFramesCount + { + streamingPendingFramesCount += 1 + + let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)! + // Must lock base address before accessing the pixel data + CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) + + let imageWidth = CVPixelBufferGetWidth(pixelBuffer) + let imageHeight = CVPixelBufferGetHeight(pixelBuffer) + + var planes: [[String: Any]] = [] + + let isPlanar = CVPixelBufferIsPlanar(pixelBuffer) + let planeCount = isPlanar ? CVPixelBufferGetPlaneCount(pixelBuffer) : 1 + + for i in 0.. 
0 {
+          currentSampleTime = CMTimeAdd(currentSampleTime, dur)
+        }
+
+        if audioIsDisconnected {
+          audioIsDisconnected = false
+
+          audioTimeOffset =
+            audioTimeOffset.value == 0
+            ? CMTimeSubtract(currentSampleTime, lastAudioSampleTime)
+            : CMTimeAdd(audioTimeOffset, CMTimeSubtract(currentSampleTime, lastAudioSampleTime))
+
+          return
+        }
+
+        lastAudioSampleTime = currentSampleTime
+
+        if audioTimeOffset.value != 0 {
+          if let adjustedSampleBuffer = copySampleBufferWithAdjustedTime(
+            sampleBuffer,
+            by: audioTimeOffset)
+          {
+            newAudioSample(adjustedSampleBuffer)
+          }
+        } else {
+          newAudioSample(sampleBuffer)
+        }
+      }
+    }
+  }
+
+  private func copySampleBufferWithAdjustedTime(_ sample: CMSampleBuffer, by offset: CMTime)
+    -> CMSampleBuffer?
+  {
+    var count: CMItemCount = 0
+    CMSampleBufferGetSampleTimingInfoArray(
+      sample, entryCount: 0, arrayToFill: nil, entriesNeededOut: &count)
+
+    let timingInfo = UnsafeMutablePointer<CMSampleTimingInfo>.allocate(capacity: Int(count))
+    defer { timingInfo.deallocate() }
+
+    CMSampleBufferGetSampleTimingInfoArray(
+      sample, entryCount: count, arrayToFill: timingInfo, entriesNeededOut: &count)
+
+    for i in 0..<count {
+      timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset)
+      timingInfo[i].presentationTimeStamp = CMTimeSubtract(
+        timingInfo[i].presentationTimeStamp, offset)
+    }
+
+    var adjustedSampleBuffer: CMSampleBuffer?
+    CMSampleBufferCreateCopyWithNewTiming(
+      allocator: nil,
+      sampleBuffer: sample,
+      sampleTimingEntryCount: count,
+      sampleTimingArray: timingInfo,
+      sampleBufferOut: &adjustedSampleBuffer)
+
+    return adjustedSampleBuffer
+  }
+
+  func copyPixelBuffer() -> Unmanaged<CVPixelBuffer>? {
+    var pixelBuffer: CVPixelBuffer?
+    pixelBufferSynchronizationQueue.sync {
+      pixelBuffer = latestPixelBuffer
+      latestPixelBuffer = nil
+    }
+
+    if let buffer = pixelBuffer {
+      return Unmanaged.passRetained(buffer)
+    } else {
+      return nil
+    }
+  }
+
+  private func reportErrorMessage(_ errorMessage: String) {
+    FLTEnsureToRunOnMainQueue { [weak self] in
+      self?.dartAPI?.reportError(errorMessage) { _ in
+        // Ignore any errors, as this is just an event broadcast.
+      }
+    }
+  }
+
+  deinit {
+    motionManager.stopAccelerometerUpdates()
+  }
+}

From 8622d3fcd8abfa21b7d4e1b5c1e7d13c3d700b2c Mon Sep 17 00:00:00 2001
From: Robert Odrowaz
Date: Thu, 8 May 2025 15:11:35 +0200
Subject: [PATCH 09/11] Extract format util methods from DefaultCamera

---
 .../camera_avfoundation/DefaultCamera.swift   | 62 +-----------------
 .../camera_avfoundation/FormatUtils.swift     | 65 +++++++++++++++++++
 2 files changed, 66 insertions(+), 61 deletions(-)
 create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift

diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
index ec8d94592358..eaf5e3bb65f0 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift
@@ -127,66 +127,6 @@ final class DefaultCamera: NSObject, Camera {
       details: error.domain)
   }
 
-  // Returns frame rate supported by format closest to targetFrameRate.
- private static func bestFrameRate(for format: FLTCaptureDeviceFormat, targetFrameRate: Double) - -> Double - { - var bestFrameRate = 0.0 - var minDistance = Double.greatestFiniteMagnitude - for range in format.videoSupportedFrameRateRanges { - let frameRate = min( - max(targetFrameRate, Double(range.minFrameRate)), Double(range.maxFrameRate)) - let distance = abs(frameRate - targetFrameRate) - if distance < minDistance { - bestFrameRate = frameRate - minDistance = distance - } - } - return bestFrameRate - } - - // Finds format with same resolution as current activeFormat in captureDevice for which - // bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond. - // Preferred are formats with the same subtype as current activeFormat. Sets this format - // as activeFormat and also updates mediaSettings.framesPerSecond to value which - // bestFrameRateForFormat returned for that format. - private static func selectBestFormatForRequestedFrameRate( - captureDevice: FLTCaptureDevice, - mediaSettings: FCPPlatformMediaSettings, - targetFrameRate: Double, - videoDimensionsForFormat: (FLTCaptureDeviceFormat) -> CMVideoDimensions - ) { - let targetResolution = videoDimensionsForFormat(captureDevice.activeFormat) - let preferredSubType = CMFormatDescriptionGetMediaSubType( - captureDevice.activeFormat.formatDescription) - var bestFormat = captureDevice.activeFormat - var _bestFrameRate = bestFrameRate(for: bestFormat, targetFrameRate: targetFrameRate) - var minDistance = abs(_bestFrameRate - targetFrameRate) - var isBestSubTypePreferred = true - - for format in captureDevice.formats { - let resolution = videoDimensionsForFormat(format) - if resolution.width != targetResolution.width || resolution.height != targetResolution.height - { - continue - } - let frameRate = bestFrameRate(for: format, targetFrameRate: targetFrameRate) - let distance = abs(frameRate - targetFrameRate) - let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription) - let isSubTypePreferred = subType == preferredSubType - if distance < minDistance - || (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred) - { - bestFormat = format - _bestFrameRate = frameRate - minDistance = distance - isBestSubTypePreferred = isSubTypePreferred - } - } - captureDevice.activeFormat = bestFormat - mediaSettings.framesPerSecond = NSNumber(value: _bestFrameRate) - } - private static func createConnection( captureDevice: FLTCaptureDevice, videoFormat: FourCharCode, @@ -276,7 +216,7 @@ final class DefaultCamera: NSObject, Camera { throw error } - DefaultCamera.selectBestFormatForRequestedFrameRate( + FormatUtils.selectBestFormatForRequestedFrameRate( captureDevice: captureDevice, mediaSettings: mediaSettings, targetFrameRate: targetFrameRate.doubleValue, diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift new file mode 100644 index 000000000000..91a9d6a85c8f --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift @@ -0,0 +1,65 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +enum FormatUtils { + // Returns frame rate supported by format closest to targetFrameRate. 
+ private static func bestFrameRate(for format: FLTCaptureDeviceFormat, targetFrameRate: Double) + -> Double + { + var bestFrameRate = 0.0 + var minDistance = Double.greatestFiniteMagnitude + for range in format.videoSupportedFrameRateRanges { + let frameRate = min( + max(targetFrameRate, Double(range.minFrameRate)), Double(range.maxFrameRate)) + let distance = abs(frameRate - targetFrameRate) + if distance < minDistance { + bestFrameRate = frameRate + minDistance = distance + } + } + return bestFrameRate + } + + // Finds format with same resolution as current activeFormat in captureDevice for which + // bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond. + // Preferred are formats with the same subtype as current activeFormat. Sets this format + // as activeFormat and also updates mediaSettings.framesPerSecond to value which + // bestFrameRateForFormat returned for that format. + static func selectBestFormatForRequestedFrameRate( + captureDevice: FLTCaptureDevice, + mediaSettings: FCPPlatformMediaSettings, + targetFrameRate: Double, + videoDimensionsForFormat: (FLTCaptureDeviceFormat) -> CMVideoDimensions + ) { + let targetResolution = videoDimensionsForFormat(captureDevice.activeFormat) + let preferredSubType = CMFormatDescriptionGetMediaSubType( + captureDevice.activeFormat.formatDescription) + var bestFormat = captureDevice.activeFormat + var _bestFrameRate = bestFrameRate(for: bestFormat, targetFrameRate: targetFrameRate) + var minDistance = abs(_bestFrameRate - targetFrameRate) + var isBestSubTypePreferred = true + + for format in captureDevice.formats { + let resolution = videoDimensionsForFormat(format) + if resolution.width != targetResolution.width || resolution.height != targetResolution.height + { + continue + } + let frameRate = bestFrameRate(for: format, targetFrameRate: targetFrameRate) + let distance = abs(frameRate - targetFrameRate) + let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription) + let isSubTypePreferred = subType == preferredSubType + if distance < minDistance + || (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred) + { + bestFormat = format + _bestFrameRate = frameRate + minDistance = distance + isBestSubTypePreferred = isSubTypePreferred + } + } + captureDevice.activeFormat = bestFormat + mediaSettings.framesPerSecond = NSNumber(value: _bestFrameRate) + } +} From 0bde352c2e12d6f516fb24ffa0ff18418f088b0e Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Thu, 8 May 2025 15:18:02 +0200 Subject: [PATCH 10/11] Remove FLT prefix from ImageStreamHandler --- .../example/ios/RunnerTests/StreamingTests.swift | 2 +- .../Sources/camera_avfoundation/DefaultCamera.swift | 6 +++--- ...FLTImageStreamHandler.swift => ImageStreamHandler.swift} | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/{FLTImageStreamHandler.swift => ImageStreamHandler.swift} (94%) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift index e42f0ad06873..c84a38bf150a 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift @@ -12,7 +12,7 @@ import XCTest @testable import camera_avfoundation_objc #endif -private class MockImageStreamHandler: FLTImageStreamHandler { +private class 
MockImageStreamHandler: ImageStreamHandler { var eventSinkStub: ((Any?) -> Void)? override var eventSink: FlutterEventSink? { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index eaf5e3bb65f0..4e1083e73848 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -80,7 +80,7 @@ final class DefaultCamera: NSObject, Camera { /// instead of just a single delegate reference. private(set) var inProgressSavePhotoDelegates = [Int64: FLTSavePhotoDelegate]() - private var imageStreamHandler: FLTImageStreamHandler? + private var imageStreamHandler: ImageStreamHandler? private var textureId: Int64? private var previewSize: CGSize? @@ -1094,13 +1094,13 @@ final class DefaultCamera: NSObject, Camera { func startImageStream(with messenger: FlutterBinaryMessenger) { startImageStream( with: messenger, - imageStreamHandler: FLTImageStreamHandler(captureSessionQueue: captureSessionQueue) + imageStreamHandler: ImageStreamHandler(captureSessionQueue: captureSessionQueue) ) } func startImageStream( with messenger: FlutterBinaryMessenger, - imageStreamHandler: FLTImageStreamHandler + imageStreamHandler: ImageStreamHandler ) { if isStreamingImages { reportErrorMessage("Images from camera are already streaming!") diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/ImageStreamHandler.swift similarity index 94% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/ImageStreamHandler.swift index 155cd7d85185..46680e488096 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTImageStreamHandler.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/ImageStreamHandler.swift @@ -5,7 +5,7 @@ import Flutter import ObjectiveC -class FLTImageStreamHandler: NSObject, FlutterStreamHandler { +class ImageStreamHandler: NSObject, FlutterStreamHandler { /// The queue on which `eventSink` property should be accessed. let captureSessionQueue: DispatchQueue From 917fe7ee3a744ffc7428d82ced2c6f0a724b7791 Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Thu, 8 May 2025 15:46:02 +0200 Subject: [PATCH 11/11] Fix FormatUtils imports for SwiftPM --- .../Sources/camera_avfoundation/FormatUtils.swift | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift index 91a9d6a85c8f..f61ebfbdfe75 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift @@ -2,6 +2,11 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
+// Import Objective-C part of the implementation when SwiftPM is used.
+#if canImport(camera_avfoundation_objc)
+  import camera_avfoundation_objc
+#endif
+
 enum FormatUtils {
   // Returns frame rate supported by format closest to targetFrameRate.
   private static func bestFrameRate(for format: FLTCaptureDeviceFormat, targetFrameRate: Double)
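
Note on the extracted helper: FormatUtils.bestFrameRate clamps the requested
rate into each format's supported range and keeps the candidate closest to the
target. Below is a minimal standalone sketch of that rule, using a plain
FrameRateRange stand-in for AVFrameRateRange instead of the patch's
FLTCaptureDeviceFormat wrapper (the names here are illustrative, not part of
the patch):

struct FrameRateRange {
  let minFrameRate: Double
  let maxFrameRate: Double
}

// Clamp the target into each supported range; keep the clamped value whose
// distance to the target is smallest.
func bestFrameRate(ranges: [FrameRateRange], targetFrameRate: Double) -> Double {
  var best = 0.0
  var minDistance = Double.greatestFiniteMagnitude
  for range in ranges {
    let frameRate = min(max(targetFrameRate, range.minFrameRate), range.maxFrameRate)
    let distance = abs(frameRate - targetFrameRate)
    if distance < minDistance {
      best = frameRate
      minDistance = distance
    }
  }
  return best
}

// A device exposing 1-30 fps and 1-60 fps ranges, asked for 120 fps, yields
// 60 fps: the closest rate it can actually deliver.
let ranges = [
  FrameRateRange(minFrameRate: 1, maxFrameRate: 30),
  FrameRateRange(minFrameRate: 1, maxFrameRate: 60),
]
assert(bestFrameRate(ranges: ranges, targetFrameRate: 120) == 60)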
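
Note on the audio session handling: upgradeAudioSessionCategory in
DefaultCamera merges the requested AVAudioSession category with the session's
current one, so an upgrade can add playback or recording ability but never
remove it. A sketch of just that merging rule, returning the merged category
instead of applying it to the shared session (the mergedCategory helper is
illustrative, not part of the patch):

import AVFoundation

func mergedCategory(
  requested: AVAudioSession.Category,
  current: AVAudioSession.Category
) -> AVAudioSession.Category {
  let playCategories: Set<AVAudioSession.Category> = [.playback, .playAndRecord]
  let recordCategories: Set<AVAudioSession.Category> = [.record, .playAndRecord]
  let required: Set<AVAudioSession.Category> = [requested, current]

  // An ability is required if either the requested or the current category
  // implies it.
  let requiresPlay = !required.isDisjoint(with: playCategories)
  let requiresRecord = !required.isDisjoint(with: recordCategories)

  if requiresPlay && requiresRecord { return .playAndRecord }
  if requiresPlay { return .playback }
  if requiresRecord { return .record }
  return requested
}

// Requesting .playback while the session is in .record still yields
// .playAndRecord, because the existing record ability must be preserved.
assert(mergedCategory(requested: .playAndRecord, current: .playback) == .playAndRecord)
assert(mergedCategory(requested: .playback, current: .record) == .playAndRecord)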