diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index 34de3bf88e46..bef115dc1dea 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,195 +1,196 @@ ## NEXT -* Updates minimum supported SDK version to Flutter 3.22/Dart 3.4. +- Updates minimum supported SDK version to Flutter 3.22/Dart 3.4. +- Removes OCMock from tests. ## 0.9.17+5 -* Adds ability to use any supported FPS and fixes crash when using unsupported FPS. +- Adds ability to use any supported FPS and fixes crash when using unsupported FPS. ## 0.9.17+4 -* Updates Pigeon for non-nullable collection type support. -* Updates minimum supported SDK version to Flutter 3.19/Dart 3.3. +- Updates Pigeon for non-nullable collection type support. +- Updates minimum supported SDK version to Flutter 3.19/Dart 3.3. ## 0.9.17+3 -* Fixes deallocation of camera on dispose. +- Fixes deallocation of camera on dispose. ## 0.9.17+2 -* Fixes stopVideoRecording waiting indefinitely and lag at start of video. +- Fixes stopVideoRecording waiting indefinitely and lag at start of video. ## 0.9.17+1 -* Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO. +- Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO. ## 0.9.17 -* Adds Swift Package Manager compatibility. +- Adds Swift Package Manager compatibility. ## 0.9.16+3 -* Removes unused `maxVideoDuration` code. +- Removes unused `maxVideoDuration` code. ## 0.9.16+2 -* Fixes regression taking a picture in torch mode. +- Fixes regression taking a picture in torch mode. ## 0.9.16+1 -* Fixes sample times not being numeric after pause/resume. +- Fixes sample times not being numeric after pause/resume. ## 0.9.16 -* Converts Dart-to-host communcation to Pigeon. -* Fixes a race condition in camera disposal. +- Converts Dart-to-host communication to Pigeon. +- Fixes a race condition in camera disposal. 
## 0.9.15+4 -* Converts host-to-Dart communcation to Pigeon. +- Converts host-to-Dart communication to Pigeon. ## 0.9.15+3 -* Moves `pigeon` to `dev_dependencies`. +- Moves `pigeon` to `dev_dependencies`. ## 0.9.15+2 -* Converts camera query to Pigeon. +- Converts camera query to Pigeon. ## 0.9.15+1 -* Simplifies internal handling of method channel responses. +- Simplifies internal handling of method channel responses. ## 0.9.15 -* Adds support to control video FPS and bitrate. See `CameraController.withSettings`. +- Adds support to control video FPS and bitrate. See `CameraController.withSettings`. ## 0.9.14+2 -* Removes `_ambiguate` methods from example code. +- Removes `_ambiguate` methods from example code. ## 0.9.14+1 -* Fixes bug where max resolution preset does not produce highest available resolution on iOS. +- Fixes bug where max resolution preset does not produce highest available resolution on iOS. ## 0.9.14 -* Adds support to HEIF format. +- Adds support to HEIF format. ## 0.9.13+11 -* Fixes a memory leak of sample buffer when pause and resume the video recording. -* Removes development team from example app. -* Updates minimum iOS version to 12.0 and minimum Flutter version to 3.16.6. +- Fixes a memory leak of sample buffer when pause and resume the video recording. +- Removes development team from example app. +- Updates minimum iOS version to 12.0 and minimum Flutter version to 3.16.6. ## 0.9.13+10 -* Adds privacy manifest. +- Adds privacy manifest. ## 0.9.13+9 -* Fixes new lint warnings. +- Fixes new lint warnings. ## 0.9.13+8 -* Updates example app to use non-deprecated video_player method. -* Updates minimum supported SDK version to Flutter 3.10/Dart 3.0. +- Updates example app to use non-deprecated video_player method. +- Updates minimum supported SDK version to Flutter 3.10/Dart 3.0. ## 0.9.13+7 -* Fixes inverted orientation strings. +- Fixes inverted orientation strings. 
## 0.9.13+6 -* Fixes incorrect use of `NSError` that could cause crashes on launch. +- Fixes incorrect use of `NSError` that could cause crashes on launch. ## 0.9.13+5 -* Ignores audio samples until the first video sample arrives. +- Ignores audio samples until the first video sample arrives. ## 0.9.13+4 -* Adds pub topics to package metadata. -* Updates minimum supported SDK version to Flutter 3.7/Dart 2.19. +- Adds pub topics to package metadata. +- Updates minimum supported SDK version to Flutter 3.7/Dart 2.19. ## 0.9.13+3 -* Migrates `styleFrom` usage in examples off of deprecated `primary` and `onPrimary` parameters. -* Fixes unawaited_futures violations. +- Migrates `styleFrom` usage in examples off of deprecated `primary` and `onPrimary` parameters. +- Fixes unawaited_futures violations. ## 0.9.13+2 -* Removes obsolete null checks on non-nullable values. -* Updates minimum supported SDK version to Flutter 3.3/Dart 2.18. +- Removes obsolete null checks on non-nullable values. +- Updates minimum supported SDK version to Flutter 3.3/Dart 2.18. ## 0.9.13+1 -* Clarifies explanation of endorsement in README. +- Clarifies explanation of endorsement in README. ## 0.9.13 -* Allows camera to be switched while video recording. -* Aligns Dart and Flutter SDK constraints. +- Allows camera to be switched while video recording. +- Aligns Dart and Flutter SDK constraints. ## 0.9.12 -* Updates minimum Flutter version to 3.3 and iOS 11. +- Updates minimum Flutter version to 3.3 and iOS 11. ## 0.9.11+1 -* Updates links for the merge of flutter/plugins into flutter/packages. +- Updates links for the merge of flutter/plugins into flutter/packages. ## 0.9.11 -* Adds back use of Optional type. -* Updates minimum Flutter version to 3.0. +- Adds back use of Optional type. +- Updates minimum Flutter version to 3.0. ## 0.9.10+2 -* Updates code for stricter lint checks. +- Updates code for stricter lint checks. ## 0.9.10+1 -* Updates code for stricter lint checks. 
+- Updates code for stricter lint checks. ## 0.9.10 -* Remove usage of deprecated quiver Optional type. +- Remove usage of deprecated quiver Optional type. ## 0.9.9 -* Implements option to also stream when recording a video. +- Implements option to also stream when recording a video. ## 0.9.8+6 -* Updates code for `no_leading_underscores_for_local_identifiers` lint. -* Updates minimum Flutter version to 2.10. +- Updates code for `no_leading_underscores_for_local_identifiers` lint. +- Updates minimum Flutter version to 2.10. ## 0.9.8+5 -* Fixes a regression introduced in 0.9.8+4 where the stream handler is not set. +- Fixes a regression introduced in 0.9.8+4 where the stream handler is not set. ## 0.9.8+4 -* Fixes a crash due to sending orientation change events when the engine is torn down. +- Fixes a crash due to sending orientation change events when the engine is torn down. ## 0.9.8+3 -* Fixes avoid_redundant_argument_values lint warnings and minor typos. -* Ignores missing return warnings in preparation for [upcoming analysis changes](https://github.com/flutter/flutter/issues/105750). +- Fixes avoid_redundant_argument_values lint warnings and minor typos. +- Ignores missing return warnings in preparation for [upcoming analysis changes](https://github.com/flutter/flutter/issues/105750). ## 0.9.8+2 -* Fixes exception in registerWith caused by the switch to an in-package method channel. +- Fixes exception in registerWith caused by the switch to an in-package method channel. ## 0.9.8+1 -* Ignores deprecation warnings for upcoming styleFrom button API changes. +- Ignores deprecation warnings for upcoming styleFrom button API changes. ## 0.9.8 -* Switches to internal method channel implementation. +- Switches to internal method channel implementation. ## 0.9.7+1 -* Splits from `camera` as a federated implementation. +- Splits from `camera` as a federated implementation. 
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 041d918da5b9..dbcaf65cfe78 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -10,15 +10,24 @@ 033B94BE269C40A200B4DF97 /* CameraMethodChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */; }; 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB766A2665316900CE5A93 /* CameraFocusTests.m */; }; 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; - 236906D1621AE863A5B2E770 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 89D82918721FABF772705DB0 /* libPods-Runner.a */; }; - 25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; }; 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; - 408D7A792C3C9CD000B71F9A /* OCMock in Frameworks */ = {isa = PBXBuildFile; productRef = 408D7A782C3C9CD000B71F9A /* OCMock */; }; + 3C036CAD15FEA6FC964935EE /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */; }; 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */; }; 788A065A27B0E02900533D74 /* StreamingTest.m in 
Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; }; - 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; + 78A318202AECB46A00862997 /* BuildFile in Frameworks */ = {isa = PBXBuildFile; }; 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */; }; + 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */; }; + 7FF2D09B2D15AB7A0092C411 /* MockAssetWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0872D15AB7A0092C411 /* MockAssetWriter.m */; }; + 7FF2D09C2D15AB7A0092C411 /* MockCameraDeviceDiscovery.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0892D15AB7A0092C411 /* MockCameraDeviceDiscovery.m */; }; + 7FF2D09D2D15AB7A0092C411 /* MockCaptureConnection.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D08B2D15AB7A0092C411 /* MockCaptureConnection.m */; }; + 7FF2D09E2D15AB7A0092C411 /* MockCaptureDeviceController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D08D2D15AB7A0092C411 /* MockCaptureDeviceController.m */; }; + 7FF2D09F2D15AB7A0092C411 /* MockCapturePhotoOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D08F2D15AB7A0092C411 /* MockCapturePhotoOutput.m */; }; + 7FF2D0A02D15AB7A0092C411 /* MockCapturePhotoSettings.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0912D15AB7A0092C411 /* MockCapturePhotoSettings.m */; }; + 7FF2D0A12D15AB7A0092C411 /* MockCaptureSession.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0932D15AB7A0092C411 /* MockCaptureSession.m */; }; + 7FF2D0A22D15AB7A0092C411 /* MockDeviceOrientationProvider.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0952D15AB7A0092C411 /* MockDeviceOrientationProvider.m */; }; + 
7FF2D0A32D15AB7A0092C411 /* MockEventChannel.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0972D15AB7A0092C411 /* MockEventChannel.m */; }; + 7FF2D0A42D15AB7A0092C411 /* MockPhotoData.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0992D15AB7A0092C411 /* MockPhotoData.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; @@ -30,11 +39,12 @@ E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */; }; E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */; }; E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */; }; - E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */; }; + E0B0D2BB27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0B0D2BA27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m */; }; E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */; }; E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */; }; E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0F95E3C27A32AB900699390 /* 
CameraPropertiesTests.m */; }; E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */; }; + F392940CDE88632C06D6CB59 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 6982009932DF1932663D04D5 /* libPods-RunnerTests.a */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -66,19 +76,42 @@ 03BB766A2665316900CE5A93 /* CameraFocusTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraFocusTests.m; sourceTree = ""; }; 03BB766C2665316900CE5A93 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraOrientationTests.m; sourceTree = ""; }; + 10C4CE57A7EA31FA7C113654 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; - 14AE82C910C2A12F2ECB2094 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; - 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; 
path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AvailableCamerasTest.m; sourceTree = ""; }; - 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; + 5A32C345E4881D9C7CE9479C /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; + 6982009932DF1932663D04D5 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 73BD4FD74789D3EB46FB5774 /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; 788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = ""; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraSettingsTests.m; sourceTree = ""; }; - 89D82918721FABF772705DB0 /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraExposureTests.m; sourceTree = ""; }; + 7FF2D0862D15AB7A0092C411 /* MockAssetWriter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockAssetWriter.h; sourceTree = ""; }; + 7FF2D0872D15AB7A0092C411 /* MockAssetWriter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockAssetWriter.m; sourceTree = ""; }; + 7FF2D0882D15AB7A0092C411 /* MockCameraDeviceDiscovery.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCameraDeviceDiscovery.h; sourceTree = ""; }; + 7FF2D0892D15AB7A0092C411 /* MockCameraDeviceDiscovery.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCameraDeviceDiscovery.m; sourceTree = ""; }; + 7FF2D08A2D15AB7A0092C411 /* MockCaptureConnection.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureConnection.h; sourceTree = ""; }; + 7FF2D08B2D15AB7A0092C411 /* MockCaptureConnection.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureConnection.m; sourceTree = ""; }; + 7FF2D08C2D15AB7A0092C411 /* MockCaptureDeviceController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureDeviceController.h; sourceTree = ""; }; + 
7FF2D08D2D15AB7A0092C411 /* MockCaptureDeviceController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureDeviceController.m; sourceTree = ""; }; + 7FF2D08E2D15AB7A0092C411 /* MockCapturePhotoOutput.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoOutput.h; sourceTree = ""; }; + 7FF2D08F2D15AB7A0092C411 /* MockCapturePhotoOutput.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoOutput.m; sourceTree = ""; }; + 7FF2D0902D15AB7A0092C411 /* MockCapturePhotoSettings.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoSettings.h; sourceTree = ""; }; + 7FF2D0912D15AB7A0092C411 /* MockCapturePhotoSettings.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoSettings.m; sourceTree = ""; }; + 7FF2D0922D15AB7A0092C411 /* MockCaptureSession.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureSession.h; sourceTree = ""; }; + 7FF2D0932D15AB7A0092C411 /* MockCaptureSession.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureSession.m; sourceTree = ""; }; + 7FF2D0942D15AB7A0092C411 /* MockDeviceOrientationProvider.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockDeviceOrientationProvider.h; sourceTree = ""; }; + 7FF2D0952D15AB7A0092C411 /* MockDeviceOrientationProvider.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockDeviceOrientationProvider.m; sourceTree = ""; }; + 7FF2D0962D15AB7A0092C411 /* MockEventChannel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockEventChannel.h; sourceTree = ""; }; + 7FF2D0972D15AB7A0092C411 /* MockEventChannel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockEventChannel.m; sourceTree = ""; }; + 7FF2D0982D15AB7A0092C411 /* MockPhotoData.h */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockPhotoData.h; sourceTree = ""; }; + 7FF2D0992D15AB7A0092C411 /* MockPhotoData.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockPhotoData.m; sourceTree = ""; }; + 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 93DE3DA611CB15AE1AF7956C /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -87,15 +120,13 @@ 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; - 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = 
"Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; - A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */ = {isa = PBXFileReference; indentWidth = 2; lastKnownFileType = sourcecode.c.objc; path = CameraSessionPresetsTests.m; sourceTree = ""; }; E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = QueueUtilsTests.m; sourceTree = ""; }; E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraCaptureSessionQueueRaceConditionTests.m; sourceTree = ""; }; E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTSavePhotoDelegateTests.m; sourceTree = ""; }; E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTCamPhotoCaptureTests.m; sourceTree = ""; }; E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FLTCamSampleBufferTests.m; sourceTree = ""; }; - E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPermissionTests.m; sourceTree = ""; }; + E0B0D2BA27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTCameraPermissionManagerTests.m; sourceTree = ""; }; E0C6E1FF2770F01A00EA6AA3 /* 
ThreadSafeEventChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeEventChannelTests.m; sourceTree = ""; }; E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraTestUtils.h; sourceTree = ""; }; E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraTestUtils.m; sourceTree = ""; }; @@ -108,8 +139,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 408D7A792C3C9CD000B71F9A /* OCMock in Frameworks */, - 25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */, + F392940CDE88632C06D6CB59 /* libPods-RunnerTests.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -117,8 +147,8 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */, - 236906D1621AE863A5B2E770 /* libPods-Runner.a in Frameworks */, + 78A318202AECB46A00862997 /* BuildFile in Frameworks */, + 3C036CAD15FEA6FC964935EE /* libPods-Runner.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -128,8 +158,10 @@ 03BB76692665316900CE5A93 /* RunnerTests */ = { isa = PBXGroup; children = ( + 7FF2D09A2D15AB7A0092C411 /* Mocks */, 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */, 03BB766A2665316900CE5A93 /* CameraFocusTests.m */, + 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */, 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */, 03BB766C2665316900CE5A93 /* Info.plist */, 033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */, @@ -137,7 +169,7 @@ E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */, E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */, E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */, - E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */, + E0B0D2BA27DFF2AF00E71E4B /* 
FLTCameraPermissionManagerTests.m */, E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */, E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */, E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */, @@ -151,15 +183,42 @@ path = RunnerTests; sourceTree = ""; }; - 3242FD2B467C15C62200632F /* Frameworks */ = { + 483773B8AC5ACAFFC7939408 /* Frameworks */ = { isa = PBXGroup; children = ( - 89D82918721FABF772705DB0 /* libPods-Runner.a */, - 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */, + 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */, + 6982009932DF1932663D04D5 /* libPods-RunnerTests.a */, ); name = Frameworks; sourceTree = ""; }; + 7FF2D09A2D15AB7A0092C411 /* Mocks */ = { + isa = PBXGroup; + children = ( + 7FF2D0862D15AB7A0092C411 /* MockAssetWriter.h */, + 7FF2D0872D15AB7A0092C411 /* MockAssetWriter.m */, + 7FF2D0882D15AB7A0092C411 /* MockCameraDeviceDiscovery.h */, + 7FF2D0892D15AB7A0092C411 /* MockCameraDeviceDiscovery.m */, + 7FF2D08A2D15AB7A0092C411 /* MockCaptureConnection.h */, + 7FF2D08B2D15AB7A0092C411 /* MockCaptureConnection.m */, + 7FF2D08C2D15AB7A0092C411 /* MockCaptureDeviceController.h */, + 7FF2D08D2D15AB7A0092C411 /* MockCaptureDeviceController.m */, + 7FF2D08E2D15AB7A0092C411 /* MockCapturePhotoOutput.h */, + 7FF2D08F2D15AB7A0092C411 /* MockCapturePhotoOutput.m */, + 7FF2D0902D15AB7A0092C411 /* MockCapturePhotoSettings.h */, + 7FF2D0912D15AB7A0092C411 /* MockCapturePhotoSettings.m */, + 7FF2D0922D15AB7A0092C411 /* MockCaptureSession.h */, + 7FF2D0932D15AB7A0092C411 /* MockCaptureSession.m */, + 7FF2D0942D15AB7A0092C411 /* MockDeviceOrientationProvider.h */, + 7FF2D0952D15AB7A0092C411 /* MockDeviceOrientationProvider.m */, + 7FF2D0962D15AB7A0092C411 /* MockEventChannel.h */, + 7FF2D0972D15AB7A0092C411 /* MockEventChannel.m */, + 7FF2D0982D15AB7A0092C411 /* MockPhotoData.h */, + 7FF2D0992D15AB7A0092C411 /* MockPhotoData.m */, + ); + path = Mocks; + sourceTree = ""; + }; 9740EEB11CF90186004384FC /* Flutter */ = { isa = PBXGroup; children = ( @@ -179,7 
+238,7 @@ 03BB76692665316900CE5A93 /* RunnerTests */, 97C146EF1CF9000F007C117D /* Products */, FD386F00E98D73419C929072 /* Pods */, - 3242FD2B467C15C62200632F /* Frameworks */, + 483773B8AC5ACAFFC7939408 /* Frameworks */, ); sourceTree = ""; }; @@ -219,10 +278,10 @@ FD386F00E98D73419C929072 /* Pods */ = { isa = PBXGroup; children = ( - 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */, - 14AE82C910C2A12F2ECB2094 /* Pods-Runner.release.xcconfig */, - 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */, - A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */, + 10C4CE57A7EA31FA7C113654 /* Pods-Runner.debug.xcconfig */, + 93DE3DA611CB15AE1AF7956C /* Pods-Runner.release.xcconfig */, + 73BD4FD74789D3EB46FB5774 /* Pods-RunnerTests.debug.xcconfig */, + 5A32C345E4881D9C7CE9479C /* Pods-RunnerTests.release.xcconfig */, ); path = Pods; sourceTree = ""; @@ -234,7 +293,7 @@ isa = PBXNativeTarget; buildConfigurationList = 03BB76712665316900CE5A93 /* Build configuration list for PBXNativeTarget "RunnerTests" */; buildPhases = ( - 422786A96136AA9087A2041B /* [CP] Check Pods Manifest.lock */, + E15D6BA2F8A105D236FE8B62 /* [CP] Check Pods Manifest.lock */, 03BB76642665316900CE5A93 /* Sources */, 03BB76652665316900CE5A93 /* Frameworks */, 03BB76662665316900CE5A93 /* Resources */, @@ -245,9 +304,6 @@ 03BB766E2665316900CE5A93 /* PBXTargetDependency */, ); name = RunnerTests; - packageProductDependencies = ( - 408D7A782C3C9CD000B71F9A /* OCMock */, - ); productName = camera_exampleTests; productReference = 03BB76682665316900CE5A93 /* RunnerTests.xctest */; productType = "com.apple.product-type.bundle.unit-test"; @@ -256,22 +312,20 @@ isa = PBXNativeTarget; buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( - 9872F2A25E8A171A111468CD /* [CP] Check Pods Manifest.lock */, + 8DA8F843F355F2F3627CE806 /* [CP] Check Pods Manifest.lock */, 9740EEB61CF901F6004384FC /* Run Script */, 
97C146EA1CF9000F007C117D /* Sources */, 97C146EB1CF9000F007C117D /* Frameworks */, 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 4A0695CE70E797FA66DE2DC0 /* [CP] Copy Pods Resources */, ); buildRules = ( ); dependencies = ( ); name = Runner; - packageProductDependencies = ( - 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */, - ); productName = Runner; productReference = 97C146EE1CF9000F007C117D /* Runner.app */; productType = "com.apple.product-type.application"; @@ -287,6 +341,7 @@ TargetAttributes = { 03BB76672665316900CE5A93 = { CreatedOnToolsVersion = 12.5; + LastSwiftMigration = 1610; ProvisioningStyle = Automatic; TestTargetID = 97C146ED1CF9000F007C117D; }; @@ -304,10 +359,6 @@ Base, ); mainGroup = 97C146E51CF9000F007C117D; - packageReferences = ( - 781AD8BC2B33823900A9FFBB /* XCLocalSwiftPackageReference "FlutterGeneratedPluginSwiftPackage" */, - 408D7A772C3C9CD000B71F9A /* XCRemoteSwiftPackageReference "ocmock" */, - ); productRefGroup = 97C146EF1CF9000F007C117D /* Products */; projectDirPath = ""; projectRoot = ""; @@ -356,7 +407,29 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; - 422786A96136AA9087A2041B /* [CP] Check Pods Manifest.lock */ = { + 4A0695CE70E797FA66DE2DC0 /* [CP] Copy Pods Resources */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh", + "${PODS_CONFIGURATION_BUILD_DIR}/camera_avfoundation/camera_avfoundation_privacy.bundle", + "${PODS_CONFIGURATION_BUILD_DIR}/path_provider_foundation/path_provider_foundation_privacy.bundle", + "${PODS_CONFIGURATION_BUILD_DIR}/video_player_avfoundation/video_player_avfoundation_privacy.bundle", + ); + name = "[CP] Copy Pods Resources"; + outputPaths = ( + 
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/camera_avfoundation_privacy.bundle", + "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/path_provider_foundation_privacy.bundle", + "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/video_player_avfoundation_privacy.bundle", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n"; + showEnvVarsInLog = 0; + }; + 8DA8F843F355F2F3627CE806 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -371,7 +444,7 @@ outputFileListPaths = ( ); outputPaths = ( - "$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt", + "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; @@ -393,7 +466,7 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; - 9872F2A25E8A171A111468CD /* [CP] Check Pods Manifest.lock */ = { + E15D6BA2F8A105D236FE8B62 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -408,7 +481,7 @@ outputFileListPaths = ( ); outputPaths = ( - "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + "$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; @@ -428,6 +501,7 @@ 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */, 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */, E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */, + 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */, E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */, E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */, 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */, @@ -437,7 
+511,17 @@ E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */, 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */, E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */, - E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */, + 7FF2D09B2D15AB7A0092C411 /* MockAssetWriter.m in Sources */, + 7FF2D09C2D15AB7A0092C411 /* MockCameraDeviceDiscovery.m in Sources */, + 7FF2D09D2D15AB7A0092C411 /* MockCaptureConnection.m in Sources */, + 7FF2D09E2D15AB7A0092C411 /* MockCaptureDeviceController.m in Sources */, + 7FF2D09F2D15AB7A0092C411 /* MockCapturePhotoOutput.m in Sources */, + 7FF2D0A02D15AB7A0092C411 /* MockCapturePhotoSettings.m in Sources */, + 7FF2D0A12D15AB7A0092C411 /* MockCaptureSession.m in Sources */, + 7FF2D0A22D15AB7A0092C411 /* MockDeviceOrientationProvider.m in Sources */, + 7FF2D0A32D15AB7A0092C411 /* MockEventChannel.m in Sources */, + 7FF2D0A42D15AB7A0092C411 /* MockPhotoData.m in Sources */, + E0B0D2BB27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m in Sources */, E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -484,7 +568,7 @@ /* Begin XCBuildConfiguration section */ 03BB766F2665316900CE5A93 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */; + baseConfigurationReference = 73BD4FD74789D3EB46FB5774 /* Pods-RunnerTests.debug.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; @@ -494,6 +578,7 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO; CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = ""; GCC_C_LANGUAGE_STANDARD = gnu11; INFOPLIST_FILE = RunnerTests/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 12.0; @@ -513,7 +598,7 @@ }; 03BB76702665316900CE5A93 /* Release */ = { isa = XCBuildConfiguration; - 
baseConfigurationReference = A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */; + baseConfigurationReference = 5A32C345E4881D9C7CE9479C /* Pods-RunnerTests.release.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; @@ -724,36 +809,6 @@ defaultConfigurationName = Release; }; /* End XCConfigurationList section */ - -/* Begin XCLocalSwiftPackageReference section */ - 781AD8BC2B33823900A9FFBB /* XCLocalSwiftPackageReference "FlutterGeneratedPluginSwiftPackage" */ = { - isa = XCLocalSwiftPackageReference; - relativePath = Flutter/ephemeral/Packages/FlutterGeneratedPluginSwiftPackage; - }; -/* End XCLocalSwiftPackageReference section */ - -/* Begin XCRemoteSwiftPackageReference section */ - 408D7A772C3C9CD000B71F9A /* XCRemoteSwiftPackageReference "ocmock" */ = { - isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/erikdoe/ocmock"; - requirement = { - kind = revision; - revision = fe1661a3efed11831a6452f4b1a0c5e6ddc08c3d; - }; - }; -/* End XCRemoteSwiftPackageReference section */ - -/* Begin XCSwiftPackageProductDependency section */ - 408D7A782C3C9CD000B71F9A /* OCMock */ = { - isa = XCSwiftPackageProductDependency; - package = 408D7A772C3C9CD000B71F9A /* XCRemoteSwiftPackageReference "ocmock" */; - productName = OCMock; - }; - 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */ = { - isa = XCSwiftPackageProductDependency; - productName = FlutterGeneratedPluginSwiftPackage; - }; -/* End XCSwiftPackageProductDependency section */ }; rootObject = 97C146E61CF9000F007C117D /* Project object */; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m index f26a8dc48f16..a2db7ece30dc 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m +++ 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m @@ -8,33 +8,52 @@ #endif @import XCTest; @import AVFoundation; -#import + +#import "MockCameraDeviceDiscovery.h" +#import "MockCaptureDeviceController.h" @interface AvailableCamerasTest : XCTestCase +@property(nonatomic, strong) MockCameraDeviceDiscovery *mockDeviceDiscovery; +@property(nonatomic, strong) CameraPlugin *cameraPlugin; @end @implementation AvailableCamerasTest +- (void)setUp { + [super setUp]; + + self.mockDeviceDiscovery = [[MockCameraDeviceDiscovery alloc] init]; + self.cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:_mockDeviceDiscovery + sessionFactory:^id { + return nil; + } + deviceFactory:^id(NSString *name) { + return nil; + }]; +} + - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; // iPhone 13 Cameras: - AVCaptureDevice *wideAngleCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([wideAngleCamera uniqueID]).andReturn(@"0"); - OCMStub([wideAngleCamera position]).andReturn(AVCaptureDevicePositionBack); + MockCaptureDeviceController *wideAngleCamera = [[MockCaptureDeviceController alloc] init]; + wideAngleCamera.uniqueID = @"0"; + wideAngleCamera.position = AVCaptureDevicePositionBack; - AVCaptureDevice *frontFacingCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([frontFacingCamera uniqueID]).andReturn(@"1"); - OCMStub([frontFacingCamera position]).andReturn(AVCaptureDevicePositionFront); + MockCaptureDeviceController *frontFacingCamera = [[MockCaptureDeviceController alloc] init]; + frontFacingCamera.uniqueID = @"1"; + frontFacingCamera.position = AVCaptureDevicePositionFront; - AVCaptureDevice *ultraWideCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([ultraWideCamera 
uniqueID]).andReturn(@"2"); - OCMStub([ultraWideCamera position]).andReturn(AVCaptureDevicePositionBack); + MockCaptureDeviceController *ultraWideCamera = [[MockCaptureDeviceController alloc] init]; + ultraWideCamera.uniqueID = @"2"; + ultraWideCamera.position = AVCaptureDevicePositionBack; - AVCaptureDevice *telephotoCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([telephotoCamera uniqueID]).andReturn(@"3"); - OCMStub([telephotoCamera position]).andReturn(AVCaptureDevicePositionBack); + MockCaptureDeviceController *telephotoCamera = [[MockCaptureDeviceController alloc] init]; + telephotoCamera.uniqueID = @"3"; + telephotoCamera.position = AVCaptureDevicePositionBack; NSMutableArray *requiredTypes = [@[ AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera ] @@ -43,21 +62,23 @@ - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { [requiredTypes addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera]; } - id discoverySessionMock = OCMClassMock([AVCaptureDeviceDiscoverySession class]); - OCMStub([discoverySessionMock discoverySessionWithDeviceTypes:requiredTypes - mediaType:AVMediaTypeVideo - position:AVCaptureDevicePositionUnspecified]) - .andReturn(discoverySessionMock); - NSMutableArray *cameras = [NSMutableArray array]; [cameras addObjectsFromArray:@[ wideAngleCamera, frontFacingCamera, telephotoCamera ]]; if (@available(iOS 13.0, *)) { [cameras addObject:ultraWideCamera]; } - OCMStub([discoverySessionMock devices]).andReturn([NSArray arrayWithArray:cameras]); + + _mockDeviceDiscovery.discoverySessionStub = ^NSArray> *_Nullable( + NSArray *_Nonnull deviceTypes, AVMediaType _Nonnull mediaType, + AVCaptureDevicePosition position) { + XCTAssertEqualObjects(deviceTypes, requiredTypes); + XCTAssertEqual(mediaType, AVMediaTypeVideo); + XCTAssertEqual(position, AVCaptureDevicePositionUnspecified); + return cameras; + }; __block NSArray *resultValue; - [camera + [_cameraPlugin 
availableCamerasWithCompletion:^(NSArray *_Nullable result, FlutterError *_Nullable error) { XCTAssertNil(error); @@ -74,17 +95,16 @@ - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { } } - (void)testAvailableCamerasShouldReturnOneCameraOnSingleCameraIPhone { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; // iPhone 8 Cameras: - AVCaptureDevice *wideAngleCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([wideAngleCamera uniqueID]).andReturn(@"0"); - OCMStub([wideAngleCamera position]).andReturn(AVCaptureDevicePositionBack); + MockCaptureDeviceController *wideAngleCamera = [[MockCaptureDeviceController alloc] init]; + wideAngleCamera.uniqueID = @"0"; + wideAngleCamera.position = AVCaptureDevicePositionBack; - AVCaptureDevice *frontFacingCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([frontFacingCamera uniqueID]).andReturn(@"1"); - OCMStub([frontFacingCamera position]).andReturn(AVCaptureDevicePositionFront); + MockCaptureDeviceController *frontFacingCamera = [[MockCaptureDeviceController alloc] init]; + frontFacingCamera.uniqueID = @"1"; + frontFacingCamera.position = AVCaptureDevicePositionFront; NSMutableArray *requiredTypes = [@[ AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera ] @@ -93,18 +113,20 @@ - (void)testAvailableCamerasShouldReturnOneCameraOnSingleCameraIPhone { [requiredTypes addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera]; } - id discoverySessionMock = OCMClassMock([AVCaptureDeviceDiscoverySession class]); - OCMStub([discoverySessionMock discoverySessionWithDeviceTypes:requiredTypes - mediaType:AVMediaTypeVideo - position:AVCaptureDevicePositionUnspecified]) - .andReturn(discoverySessionMock); - NSMutableArray *cameras = [NSMutableArray array]; [cameras addObjectsFromArray:@[ wideAngleCamera, frontFacingCamera ]]; - OCMStub([discoverySessionMock 
devices]).andReturn([NSArray arrayWithArray:cameras]); + + _mockDeviceDiscovery.discoverySessionStub = ^NSArray> *_Nullable( + NSArray *_Nonnull deviceTypes, AVMediaType _Nonnull mediaType, + AVCaptureDevicePosition position) { + XCTAssertEqualObjects(deviceTypes, requiredTypes); + XCTAssertEqual(mediaType, AVMediaTypeVideo); + XCTAssertEqual(position, AVCaptureDevicePositionUnspecified); + return cameras; + }; __block NSArray *resultValue; - [camera + [_cameraPlugin availableCamerasWithCompletion:^(NSArray *_Nullable result, FlutterError *_Nullable error) { XCTAssertNil(error); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m index 7b641a5746c0..28a5f7e1fcce 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m @@ -5,51 +5,74 @@ @import camera_avfoundation; @import XCTest; @import AVFoundation; -#import -@interface FLTCam : NSObject - -- (void)setExposurePointWithResult:(FlutterResult)result x:(double)x y:(double)y; -@end +#import "MockCaptureDeviceController.h" +#import "MockDeviceOrientationProvider.h" @interface CameraExposureTests : XCTestCase @property(readonly, nonatomic) FLTCam *camera; -@property(readonly, nonatomic) id mockDevice; -@property(readonly, nonatomic) id mockUIDevice; +@property(readonly, nonatomic) MockCaptureDeviceController *mockDevice; +@property(readonly, nonatomic) MockDeviceOrientationProvider *mockDeviceOrientationProvider; @end @implementation CameraExposureTests - (void)setUp { _camera = [[FLTCam alloc] init]; - _mockDevice = OCMClassMock([AVCaptureDevice class]); - _mockUIDevice = OCMPartialMock([UIDevice currentDevice]); -} + _mockDevice = [[MockCaptureDeviceController alloc] init]; + _mockDeviceOrientationProvider = [[MockDeviceOrientationProvider alloc] init]; -- 
(void)tearDown { - [_mockDevice stopMocking]; - [_mockUIDevice stopMocking]; + [_camera setValue:_mockDevice forKey:@"captureDevice"]; + [_camera setValue:_mockDeviceOrientationProvider forKey:@"deviceOrientationProvider"]; } -- (void)testSetExpsourePointWithResult_SetsExposurePointOfInterest { +- (void)testSetExposurePointWithResult_SetsExposurePointOfInterest { // UI is currently in landscape left orientation - OCMStub([(UIDevice *)_mockUIDevice orientation]).andReturn(UIDeviceOrientationLandscapeLeft); + _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; // Exposure point of interest is supported - OCMStub([_mockDevice isExposurePointOfInterestSupported]).andReturn(true); - // Set mock device as the current capture device - [_camera setValue:_mockDevice forKey:@"captureDevice"]; + _mockDevice.isExposurePointOfInterestSupported = YES; + + // Verify the exposure point of interest has been set + __block CGPoint setPoint = CGPointZero; + _mockDevice.setExposurePointOfInterestStub = ^(CGPoint point) { + if (CGPointEqualToPoint(CGPointMake(1, 1), point)) { + setPoint = point; + } + }; // Run test + XCTestExpectation *completionExpectation = [self expectationWithDescription:@"Completion called"]; + [_camera setExposurePoint:[FCPPlatformPoint makeWithX:1 y:1] + withCompletion:^(FlutterError *_Nullable error) { + XCTAssertNil(error); + [completionExpectation fulfill]; + }]; + + [self waitForExpectationsWithTimeout:1 handler:nil]; + XCTAssertEqual(setPoint.x, 1.0); + XCTAssertEqual(setPoint.y, 1.0); +} + +- (void)testSetExposurePoint_WhenNotSupported_ReturnsError { + // UI is currently in landscape left orientation + _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; + // Exposure point of interest is not supported + _mockDevice.isExposurePointOfInterestSupported = NO; + + XCTestExpectation *expectation = [self expectationWithDescription:@"Completion with error"]; + + // Run [_camera -
setExposurePointWithResult:^void(id _Nullable result) { - } - x:1 - y:1]; + setExposurePoint:[FCPPlatformPoint makeWithX:1 y:1] + withCompletion:^(FlutterError *_Nullable error) { + XCTAssertNotNil(error); + XCTAssertEqualObjects(error.code, @"setExposurePointFailed"); + XCTAssertEqualObjects(error.message, @"Device does not have exposure point capabilities"); + [expectation fulfill]; + }]; - // Verify the focus point of interest has been set - OCMVerify([_mockDevice setExposurePointOfInterest:CGPointMake(1, 1)]); + // Verify + [self waitForExpectationsWithTimeout:1 handler:nil]; } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m index 0cb8333345ab..f7d96c3aa75c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m @@ -8,100 +8,123 @@ #endif @import XCTest; @import AVFoundation; -#import + +#import "MockCaptureDeviceController.h" +#import "MockDeviceOrientationProvider.h" @interface CameraFocusTests : XCTestCase @property(readonly, nonatomic) FLTCam *camera; -@property(readonly, nonatomic) id mockDevice; -@property(readonly, nonatomic) id mockUIDevice; +@property(readonly, nonatomic) MockCaptureDeviceController *mockDevice; +@property(readonly, nonatomic) MockDeviceOrientationProvider *mockDeviceOrientationProvider; @end @implementation CameraFocusTests - (void)setUp { _camera = [[FLTCam alloc] init]; - _mockDevice = OCMClassMock([AVCaptureDevice class]); - _mockUIDevice = OCMPartialMock([UIDevice currentDevice]); -} + _mockDevice = [[MockCaptureDeviceController alloc] init]; + _mockDeviceOrientationProvider = [[MockDeviceOrientationProvider alloc] init]; -- (void)tearDown { - [_mockDevice stopMocking]; - [_mockUIDevice stopMocking]; + [_camera setValue:_mockDevice forKey:@"captureDevice"]; + [_camera 
setValue:_mockDeviceOrientationProvider forKey:@"deviceOrientationProvider"]; } - (void)testAutoFocusWithContinuousModeSupported_ShouldSetContinuousAutoFocus { - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true); - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true); - - // Don't expect setFocusMode:AVCaptureFocusModeAutoFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; + // AVCaptureFocusModeContinuousAutoFocus and AVCaptureFocusModeAutoFocus are supported + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return mode == AVCaptureFocusModeContinuousAutoFocus || mode == AVCaptureFocusModeAutoFocus; + }; + + __block BOOL setFocusModeContinuousAutoFocusCalled = NO; + + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + // Don't expect setFocusMode:AVCaptureFocusModeAutoFocus + if (mode == AVCaptureFocusModeAutoFocus) { + XCTFail(@"Unexpected call to setFocusMode"); + } else if (mode == AVCaptureFocusModeContinuousAutoFocus) { + setFocusModeContinuousAutoFocusCalled = YES; + } + }; // Run test [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; // Expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus - OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]); + XCTAssertTrue(setFocusModeContinuousAutoFocusCalled); } - (void)testAutoFocusWithContinuousModeNotSupported_ShouldSetAutoFocus { // AVCaptureFocusModeContinuousAutoFocus is not supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) - .andReturn(false); - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true); + // AVCaptureFocusModeAutoFocus is supported + 
_mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return mode == AVCaptureFocusModeAutoFocus; + }; + + __block BOOL setFocusModeAutoFocusCalled = NO; // Don't expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + if (mode == AVCaptureFocusModeContinuousAutoFocus) { + XCTFail(@"Unexpected call to setFocusMode"); + } else if (mode == AVCaptureFocusModeAutoFocus) { + setFocusModeAutoFocusCalled = YES; + } + }; // Run test [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; // Expect setFocusMode:AVCaptureFocusModeAutoFocus - OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]); + XCTAssertTrue(setFocusModeAutoFocusCalled); } - (void)testAutoFocusWithNoModeSupported_ShouldSetNothing { - // AVCaptureFocusModeContinuousAutoFocus is not supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) - .andReturn(false); - // AVCaptureFocusModeContinuousAutoFocus is not supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(false); + // No modes are supported + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return NO; + }; // Don't expect any setFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + XCTFail(@"Unexpected call to setFocusMode"); + }; // Run test [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; } - (void)testLockedFocusWithModeSupported_ShouldSetModeAutoFocus { - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true); - // AVCaptureFocusModeContinuousAutoFocus is supported - 
OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true); - - // Don't expect any setFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; + // AVCaptureFocusModeContinuousAutoFocus and AVCaptureFocusModeAutoFocus are supported + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return mode == AVCaptureFocusModeContinuousAutoFocus || mode == AVCaptureFocusModeAutoFocus; + }; + + __block BOOL setFocusModeAutoFocusCalled = NO; + + // Expect only setFocusMode:AVCaptureFocusModeAutoFocus + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + if (mode == AVCaptureFocusModeContinuousAutoFocus) { + XCTFail(@"Unexpected call to setFocusMode"); + } else if (mode == AVCaptureFocusModeAutoFocus) { + setFocusModeAutoFocusCalled = YES; + } + }; // Run test [_camera applyFocusMode:FCPPlatformFocusModeLocked onDevice:_mockDevice]; - // Expect setFocusMode:AVCaptureFocusModeAutoFocus - OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]); + XCTAssertTrue(setFocusModeAutoFocusCalled); } - (void)testLockedFocusWithModeNotSupported_ShouldSetNothing { - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true); - // AVCaptureFocusModeContinuousAutoFocus is not supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(false); + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return mode == AVCaptureFocusModeContinuousAutoFocus; + }; // Don't expect any setFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + XCTFail(@"Unexpected call to setFocusMode"); + }; // Run test [_camera applyFocusMode:FCPPlatformFocusModeLocked onDevice:_mockDevice]; @@ -109,11 +132,16 @@ - 
(void)testLockedFocusWithModeNotSupported_ShouldSetNothing { - (void)testSetFocusPointWithResult_SetsFocusPointOfInterest { // UI is currently in landscape left orientation - OCMStub([(UIDevice *)_mockUIDevice orientation]).andReturn(UIDeviceOrientationLandscapeLeft); + _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; // Focus point of interest is supported - OCMStub([_mockDevice isFocusPointOfInterestSupported]).andReturn(true); - // Set mock device as the current capture device - [_camera setValue:_mockDevice forKey:@"captureDevice"]; + _mockDevice.isFocusPointOfInterestSupported = YES; + + __block BOOL setFocusPointOfInterestCalled = NO; + _mockDevice.setFocusPointOfInterestStub = ^(CGPoint point) { + if (point.x == 1 && point.y == 1) { + setFocusPointOfInterestCalled = YES; + } + }; // Run test [_camera setFocusPoint:[FCPPlatformPoint makeWithX:1 y:1] @@ -121,7 +149,28 @@ - (void)testSetFocusPointWithResult_SetsFocusPointOfInterest { }]; // Verify the focus point of interest has been set - OCMVerify([_mockDevice setFocusPointOfInterest:CGPointMake(1, 1)]); + XCTAssertTrue(setFocusPointOfInterestCalled); +} + +- (void)testSetFocusPoint_WhenNotSupported_ReturnsError { + // UI is currently in landscape left orientation + _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; + // Focus point of interest is not supported + _mockDevice.isFocusPointOfInterestSupported = NO; + + XCTestExpectation *expectation = [self expectationWithDescription:@"Completion with error"]; + + // Run + [_camera setFocusPoint:[FCPPlatformPoint makeWithX:1 y:1] + withCompletion:^(FlutterError *_Nullable error) { + XCTAssertNotNil(error); + XCTAssertEqualObjects(error.code, @"setFocusPointFailed"); + XCTAssertEqualObjects(error.message, @"Device does not have focus point capabilities"); + [expectation fulfill]; + }]; + + // Verify + [self waitForExpectationsWithTimeout:1 handler:nil]; } @end diff --git 
a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m index 4df1994699df..978f63d2805b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m @@ -8,7 +8,9 @@ #endif @import XCTest; @import AVFoundation; -#import + +#import "MockCaptureDeviceController.h" +#import "MockCaptureSession.h" @interface CameraMethodChannelTests : XCTestCase @end @@ -16,18 +18,23 @@ @interface CameraMethodChannelTests : XCTestCase @implementation CameraMethodChannelTests - (void)testCreate_ShouldCallResultOnMainThread { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; + MockCaptureSession *avCaptureSessionMock = [[MockCaptureSession alloc] init]; + avCaptureSessionMock.mockCanSetSessionPreset = YES; - XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; + MockCaptureDeviceController *mockDeviceController = [[MockCaptureDeviceController alloc] init]; - // Set up mocks for initWithCameraName method - id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]]) - .andReturn([AVCaptureInput alloc]); + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:nil + sessionFactory:^id { + return avCaptureSessionMock; + } + deviceFactory:^id(NSString *name) { + return mockDeviceController; + }]; - id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock); - OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; 
// Set up method call __block NSNumber *resultValue; @@ -51,15 +58,21 @@ - (void)testCreate_ShouldCallResultOnMainThread { } - (void)testDisposeShouldDeallocCamera { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; + MockCaptureSession *avCaptureSessionMock = [[MockCaptureSession alloc] init]; + avCaptureSessionMock.mockCanSetSessionPreset = YES; - id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]]) - .andReturn([AVCaptureInput alloc]); + MockCaptureDeviceController *mockDeviceController = [[MockCaptureDeviceController alloc] init]; - id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock); - OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:nil + sessionFactory:^id { + return avCaptureSessionMock; + } + deviceFactory:^id(NSString *name) { + return mockDeviceController; + }]; XCTestExpectation *createExpectation = [self expectationWithDescription:@"create's result block must be called"]; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index e6ce8d48bc5b..0ee82925dbb4 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -9,7 +9,10 @@ @import XCTest; @import Flutter; -#import +#import "MockCameraDeviceDiscovery.h" +#import "MockCaptureDeviceController.h" +#import "MockCaptureSession.h" +#import "MockDeviceOrientationProvider.h" @interface StubGlobalEventApi : FCPCameraGlobalEventApi @property(nonatomic) BOOL 
called; @@ -32,13 +35,68 @@ - (FlutterBinaryMessengerConnection)setMessageHandlerOnChannel:(nonnull NSString @end +@interface MockCamera : FLTCam +@property(nonatomic, copy) void (^setDeviceOrientationStub)(UIDeviceOrientation orientation); +@end + +@implementation MockCamera +- (void)setDeviceOrientation:(UIDeviceOrientation)orientation { + if (self.setDeviceOrientationStub) { + self.setDeviceOrientationStub(orientation); + } +} + +@end + +@interface MockUIDevice : UIDevice +@property(nonatomic, assign) UIDeviceOrientation mockOrientation; +@end + +@implementation MockUIDevice +- (UIDeviceOrientation)orientation { + return self.mockOrientation; +} + +@end + #pragma mark - @interface CameraOrientationTests : XCTestCase +@property(readonly, nonatomic) MockCamera *camera; +@property(readonly, nonatomic) MockCaptureDeviceController *mockDevice; +@property(readonly, nonatomic) StubGlobalEventApi *eventAPI; +@property(readonly, nonatomic) CameraPlugin *cameraPlugin; +@property(readonly, nonatomic) MockCameraDeviceDiscovery *deviceDiscovery; +@property(readonly, nonatomic) MockCaptureSession *captureSession; @end @implementation CameraOrientationTests +- (void)setUp { + [super setUp]; + _mockDevice = [[MockCaptureDeviceController alloc] init]; + _camera = [[MockCamera alloc] init]; + _eventAPI = [[StubGlobalEventApi alloc] init]; + _deviceDiscovery = [[MockCameraDeviceDiscovery alloc] init]; + _captureSession = [[MockCaptureSession alloc] init]; + + [_camera setValue:_mockDevice forKey:@"captureDevice"]; + + __weak typeof(self) weakSelf = self; + + _cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:_eventAPI + deviceDiscovery:_deviceDiscovery + sessionFactory:^id { + return weakSelf.captureSession; + } + deviceFactory:^id(NSString *name) { + return nil; + }]; + _cameraPlugin.camera = _camera; +} + // Ensure that the given queue and then the main queue have both cycled, to wait for any pending // async events that may have been bounced 
between them. - (void)waitForRoundTripWithQueue:(dispatch_queue_t)queue { @@ -57,37 +115,36 @@ - (void)sendOrientation:(UIDeviceOrientation)orientation toCamera:(CameraPlugin } - (void)testOrientationNotifications { - StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; - CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:eventAPI]; - - [self sendOrientation:UIDeviceOrientationPortraitUpsideDown toCamera:cameraPlugin]; - XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitDown); - [self sendOrientation:UIDeviceOrientationPortrait toCamera:cameraPlugin]; - XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitUp); - [self sendOrientation:UIDeviceOrientationLandscapeLeft toCamera:cameraPlugin]; - XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeLeft); - [self sendOrientation:UIDeviceOrientationLandscapeRight toCamera:cameraPlugin]; - XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeRight); + [self sendOrientation:UIDeviceOrientationPortraitUpsideDown toCamera:_cameraPlugin]; + XCTAssertEqual(_eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitDown); + [self sendOrientation:UIDeviceOrientationPortrait toCamera:_cameraPlugin]; + XCTAssertEqual(_eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitUp); + [self sendOrientation:UIDeviceOrientationLandscapeLeft toCamera:_cameraPlugin]; + XCTAssertEqual(_eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeLeft); + [self sendOrientation:UIDeviceOrientationLandscapeRight toCamera:_cameraPlugin]; + XCTAssertEqual(_eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeRight); } - (void)testOrientationNotificationsNotCalledForFaceUp { - StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; - CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:eventAPI]; - - [self 
sendOrientation:UIDeviceOrientationFaceUp toCamera:cameraPlugin]; + [self sendOrientation:UIDeviceOrientationFaceUp toCamera:_cameraPlugin]; - XCTAssertFalse(eventAPI.called); + XCTAssertFalse(_eventAPI.called); } - (void)testOrientationNotificationsNotCalledForFaceDown { StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; + + __weak typeof(self) weakSelf = self; CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:eventAPI]; + messenger:nil + globalAPI:eventAPI + deviceDiscovery:_deviceDiscovery + sessionFactory:^id { + return weakSelf.captureSession; + } + deviceFactory:^id(NSString *name) { + return nil; + }]; [self sendOrientation:UIDeviceOrientationFaceDown toCamera:cameraPlugin]; @@ -98,52 +155,66 @@ - (void)testOrientationUpdateMustBeOnCaptureSessionQueue { XCTestExpectation *queueExpectation = [self expectationWithDescription:@"Orientation update must happen on the capture session queue"]; - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; + CameraPlugin *plugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; const char *captureSessionQueueSpecific = "capture_session_queue"; - dispatch_queue_set_specific(camera.captureSessionQueue, captureSessionQueueSpecific, + dispatch_queue_set_specific(plugin.captureSessionQueue, captureSessionQueueSpecific, (void *)captureSessionQueueSpecific, NULL); - FLTCam *mockCam = OCMClassMock([FLTCam class]); - camera.camera = mockCam; - OCMStub([mockCam setDeviceOrientation:UIDeviceOrientationLandscapeLeft]) - .andDo(^(NSInvocation *invocation) { - if (dispatch_get_specific(captureSessionQueueSpecific)) { - [queueExpectation fulfill]; - } - }); + plugin.camera = _camera; + + _camera.setDeviceOrientationStub = ^(UIDeviceOrientation orientation) { + if (dispatch_get_specific(captureSessionQueueSpecific)) { + [queueExpectation fulfill]; + } + }; - [camera orientationChanged: + [plugin orientationChanged: [self 
createMockNotificationForOrientation:UIDeviceOrientationLandscapeLeft]]; [self waitForExpectationsWithTimeout:1 handler:nil]; } - (void)testOrientationChanged_noRetainCycle { dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - FLTCam *mockCam = OCMClassMock([FLTCam class]); - StubGlobalEventApi *stubAPI = [[StubGlobalEventApi alloc] init]; - __weak CameraPlugin *weakCamera; + __weak CameraPlugin *weakPlugin; + __weak typeof(self) weakSelf = self; @autoreleasepool { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:stubAPI]; - weakCamera = camera; - camera.captureSessionQueue = captureSessionQueue; - camera.camera = mockCam; - - [camera orientationChanged: + CameraPlugin *plugin = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:_eventAPI + deviceDiscovery:_deviceDiscovery + sessionFactory:^id { + return weakSelf.captureSession; + } + deviceFactory:^id(NSString *name) { + return nil; + }]; + weakPlugin = plugin; + plugin.captureSessionQueue = captureSessionQueue; + plugin.camera = _camera; + + [plugin orientationChanged: [self createMockNotificationForOrientation:UIDeviceOrientationLandscapeLeft]]; } // Sanity check - XCTAssertNil(weakCamera, @"Camera must have been deallocated."); + XCTAssertNil(weakPlugin, @"Camera must have been deallocated."); + + __block BOOL setDeviceOrientationCalled = NO; + _camera.setDeviceOrientationStub = ^(UIDeviceOrientation orientation) { + if (orientation == UIDeviceOrientationLandscapeLeft) { + setDeviceOrientationCalled = YES; + } + }; + + __weak StubGlobalEventApi *weakEventAPI = _eventAPI; // Must check in captureSessionQueue since orientationChanged dispatches to this queue. 
XCTestExpectation *expectation = [self expectationWithDescription:@"Dispatched to capture session queue"]; dispatch_async(captureSessionQueue, ^{ - OCMVerify(never(), [mockCam setDeviceOrientation:UIDeviceOrientationLandscapeLeft]); - XCTAssertFalse(stubAPI.called); + XCTAssertFalse(setDeviceOrientationCalled); + XCTAssertFalse(weakEventAPI.called); [expectation fulfill]; }); @@ -151,8 +222,8 @@ - (void)testOrientationChanged_noRetainCycle { } - (NSNotification *)createMockNotificationForOrientation:(UIDeviceOrientation)deviceOrientation { - UIDevice *mockDevice = OCMClassMock([UIDevice class]); - OCMStub([mockDevice orientation]).andReturn(deviceOrientation); + MockUIDevice *mockDevice = [[MockUIDevice alloc] init]; + mockDevice.mockOrientation = deviceOrientation; return [NSNotification notificationWithName:@"orientation_test" object:mockDevice]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m index 04bdd0795dab..60dfca541683 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m @@ -8,7 +8,6 @@ #endif @import XCTest; @import AVFoundation; -#import @interface CameraPreviewPauseTests : XCTestCase @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m index 08cba70bf3a2..4c270cd864b6 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m @@ -9,8 +9,9 @@ @import AVFoundation; @import XCTest; -#import #import "CameraTestUtils.h" +#import "MockCaptureDeviceController.h" +#import "MockCaptureSession.h" /// Includes test 
cases related to resolution presets setting operations for FLTCam class. @interface FLTCamSessionPresetsTest : XCTestCase @@ -20,62 +21,90 @@ @implementation FLTCamSessionPresetsTest - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset { NSString *expectedPreset = AVCaptureSessionPresetInputPriority; - - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); - - id captureFormatMock = OCMClassMock([AVCaptureDeviceFormat class]); - id captureDeviceMock = OCMClassMock([AVCaptureDevice class]); - OCMStub([captureDeviceMock formats]).andReturn(@[ captureFormatMock ]); - - OCMExpect([captureDeviceMock activeFormat]).andReturn(captureFormatMock); - OCMExpect([captureDeviceMock lockForConfiguration:NULL]).andReturn(YES); - OCMExpect([videoSessionMock setSessionPreset:expectedPreset]); - - FLTCreateCamWithVideoDimensionsForFormat(videoSessionMock, FCPPlatformResolutionPresetMax, - captureDeviceMock, - ^CMVideoDimensions(AVCaptureDeviceFormat *format) { - CMVideoDimensions videoDimensions; - videoDimensions.width = 1; - videoDimensions.height = 1; - return videoDimensions; - }); - - OCMVerifyAll(captureDeviceMock); - OCMVerifyAll(videoSessionMock); + XCTestExpectation *presetExpectation = [self expectationWithDescription:@"Expected preset set"]; + + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMax); + + MockCaptureDeviceController *captureDeviceMock = [[MockCaptureDeviceController alloc] init]; + MockCaptureDeviceFormat *fakeFormat = [[MockCaptureDeviceFormat alloc] init]; + captureDeviceMock.formats = @[ fakeFormat ]; + captureDeviceMock.activeFormat = fakeFormat; + configuration.captureDeviceFactory = ^id _Nonnull { + return captureDeviceMock; + }; + + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; + videoSessionMock.setSessionPresetStub = 
^(AVCaptureSessionPreset _Nonnull preset) { + if (preset == expectedPreset) { + [presetExpectation fulfill]; + } + }; + configuration.videoCaptureSession = videoSessionMock; + + configuration.videoDimensionsForFormat = ^CMVideoDimensions(id format) { + CMVideoDimensions videoDimensions; + videoDimensions.width = 1; + videoDimensions.height = 1; + return videoDimensions; + }; + + FLTCreateCamWithConfiguration(configuration); + + [self waitForExpectationsWithTimeout:1 handler:nil]; } - (void)testResolutionPresetWithCanSetSessionPresetMax_mustUpdateCaptureSessionPreset { NSString *expectedPreset = AVCaptureSessionPreset3840x2160; + XCTestExpectation *expectation = [self expectationWithDescription:@"Expected preset set"]; - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; // Make sure that setting resolution preset for session always succeeds. 
- OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - OCMExpect([videoSessionMock setSessionPreset:expectedPreset]); - - FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetMax); - - OCMVerifyAll(videoSessionMock); + videoSessionMock.mockCanSetSessionPreset = YES; + videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { + if (preset == expectedPreset) { + [expectation fulfill]; + } + }; + + configuration.videoCaptureSession = videoSessionMock; + configuration.mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMax); + configuration.captureDeviceFactory = ^id _Nonnull { + return [[MockCaptureDeviceController alloc] init]; + }; + + FLTCreateCamWithConfiguration(configuration); + + [self waitForExpectationsWithTimeout:1 handler:nil]; } - (void)testResolutionPresetWithCanSetSessionPresetUltraHigh_mustUpdateCaptureSessionPreset { NSString *expectedPreset = AVCaptureSessionPreset3840x2160; + XCTestExpectation *expectation = [self expectationWithDescription:@"Expected preset set"]; - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; // Make sure that setting resolution preset for session always succeeds. - OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - + videoSessionMock.mockCanSetSessionPreset = YES; // Expect that setting "ultraHigh" resolutionPreset correctly updates videoCaptureSession. 
- OCMExpect([videoSessionMock setSessionPreset:expectedPreset]); + videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { + if (preset == expectedPreset) { + [expectation fulfill]; + } + }; + + configuration.videoCaptureSession = videoSessionMock; + configuration.mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetUltraHigh); + configuration.captureDeviceFactory = ^id _Nonnull { + return [[MockCaptureDeviceController alloc] init]; + }; - FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetUltraHigh); + FLTCreateCamWithConfiguration(configuration); - OCMVerifyAll(videoSessionMock); + [self waitForExpectationsWithTimeout:1 handler:nil]; } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m index 9bed6bea4883..cc4daf6fda9c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m @@ -8,8 +8,10 @@ #endif @import XCTest; @import AVFoundation; -#import #import "CameraTestUtils.h" +#import "MockAssetWriter.h" +#import "MockCaptureDeviceController.h" +#import "MockCaptureSession.h" static const FCPPlatformResolutionPreset gTestResolutionPreset = FCPPlatformResolutionPresetMedium; static const int gTestFramesPerSecond = 15; @@ -65,11 +67,11 @@ - (void)unlockDevice:(AVCaptureDevice *)captureDevice { [_unlockExpectation fulfill]; } -- (void)beginConfigurationForSession:(AVCaptureSession *)videoCaptureSession { +- (void)beginConfigurationForSession:(id)videoCaptureSession { [_beginConfigurationExpectation fulfill]; } -- (void)commitConfigurationForSession:(AVCaptureSession *)videoCaptureSession { +- (void)commitConfigurationForSession:(id)videoCaptureSession { [_commitConfigurationExpectation fulfill]; } @@ -93,17 +95,16 @@ - 
(void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)capture } } -- (AVAssetWriterInput *)assetWriterAudioInputWithOutputSettings: +- (id)assetWriterAudioInputWithOutputSettings: (nullable NSDictionary *)outputSettings { if ([outputSettings[AVEncoderBitRateKey] isEqual:@(gTestAudioBitrate)]) { [_audioSettingsExpectation fulfill]; } - return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio - outputSettings:outputSettings]; + return [[MockAssetWriterInput alloc] init]; } -- (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: +- (id)assetWriterVideoInputWithOutputSettings: (nullable NSDictionary *)outputSettings { if ([outputSettings[AVVideoCompressionPropertiesKey] isKindOfClass:[NSMutableDictionary class]]) { NSDictionary *compressionProperties = outputSettings[AVVideoCompressionPropertiesKey]; @@ -115,8 +116,7 @@ - (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: } } - return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo - outputSettings:outputSettings]; + return [[MockAssetWriterInput alloc] init]; } - (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer { @@ -143,8 +143,11 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { TestMediaSettingsAVWrapper *injectedWrapper = [[TestMediaSettingsAVWrapper alloc] initWithTestCase:self]; - FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_create("test", NULL), settings, injectedWrapper, nil); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.mediaSettings = settings; + configuration.mediaSettingsWrapper = injectedWrapper; + + FLTCam *camera = FLTCreateCamWithConfiguration(configuration); // Expect FPS configuration is passed to camera device. 
[self waitForExpectations:@[ @@ -167,19 +170,23 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { } - (void)testSettings_ShouldBeSupportedByMethodCall { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; + MockCaptureDeviceController *mockDeviceController = [[MockCaptureDeviceController alloc] init]; + MockCaptureSession *mockSession = [[MockCaptureSession alloc] init]; + mockSession.mockCanSetSessionPreset = YES; + + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:nil + sessionFactory:^id { + return mockSession; + } + deviceFactory:^id(NSString *name) { + return mockDeviceController; + }]; XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; - // Set up mocks for initWithCameraName method - id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]]) - .andReturn([AVCaptureInput alloc]); - - id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock); - OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - // Set up method call FCPPlatformMediaSettings *mediaSettings = [FCPPlatformMediaSettings makeWithResolutionPreset:gTestResolutionPreset @@ -209,11 +216,12 @@ - (void)testSettings_ShouldSelectFormatWhichSupports60FPS { videoBitrate:@(gTestVideoBitrate) audioBitrate:@(gTestAudioBitrate) enableAudio:gTestEnableAudio]; + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.mediaSettings = settings; - FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_create("test", NULL), settings, nil, nil); + FLTCam *camera = FLTCreateCamWithConfiguration(configuration); - AVFrameRateRange *range = 
camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0]; + id range = camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0]; XCTAssertLessThanOrEqual(range.minFrameRate, 60); XCTAssertGreaterThanOrEqual(range.maxFrameRate, 60); } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h index 2bbb56c51a79..dd00c6b24ff8 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h @@ -9,38 +9,14 @@ NS_ASSUME_NONNULL_BEGIN -/// Creates an `FLTCam` that runs its capture session operations on a given queue. -/// @param captureSessionQueue the capture session queue -/// @param mediaSettings media settings configuration parameters -/// @param mediaSettingsAVWrapper provider to perform media settings operations (for unit test -/// dependency injection). -/// @param captureDeviceFactory a callback to create capture device instances -/// @return an FLTCam object. -extern FLTCam *_Nullable FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_t _Nullable captureSessionQueue, - FCPPlatformMediaSettings *_Nullable mediaSettings, - FLTCamMediaSettingsAVWrapper *_Nullable mediaSettingsAVWrapper, - CaptureDeviceFactory _Nullable captureDeviceFactory); - -extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue); - -/// Creates an `FLTCam` with a given captureSession and resolutionPreset -/// @param captureSession AVCaptureSession for video -/// @param resolutionPreset preset for camera's captureSession resolution -/// @return an FLTCam object. -extern FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession, - FCPPlatformResolutionPreset resolutionPreset); - -/// Creates an `FLTCam` with a given captureSession and resolutionPreset. 
-/// Allows to inject a capture device and a block to compute the video dimensions. -/// @param captureSession AVCaptureSession for video -/// @param resolutionPreset preset for camera's captureSession resolution -/// @param captureDevice AVCaptureDevice to be used -/// @param videoDimensionsForFormat custom code to determine video dimensions -/// @return an FLTCam object. -extern FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - AVCaptureSession *captureSession, FCPPlatformResolutionPreset resolutionPreset, - AVCaptureDevice *captureDevice, VideoDimensionsForFormat videoDimensionsForFormat); +extern FCPPlatformMediaSettings *FCPGetDefaultMediaSettings( + FCPPlatformResolutionPreset resolutionPreset); + +/// Creates a test `FLTCamConfiguration` that has a default mock setup. +extern FLTCamConfiguration *FLTCreateTestConfiguration(void); + +/// Creates an `FLTCam` with a test configuration. +extern FLTCam *FLTCreateCamWithConfiguration(FLTCamConfiguration *configuration); /// Creates a test sample buffer. /// @return a test sample buffer. 
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 503a5c255c59..b445701d01e6 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -4,12 +4,15 @@ #import "CameraTestUtils.h" -#import @import AVFoundation; @import camera_avfoundation; -static FCPPlatformMediaSettings *FCPGetDefaultMediaSettings( - FCPPlatformResolutionPreset resolutionPreset) { +#import "MockAssetWriter.h" +#import "MockCaptureDeviceController.h" +#import "MockCapturePhotoSettings.h" +#import "MockCaptureSession.h" + +FCPPlatformMediaSettings *FCPGetDefaultMediaSettings(FCPPlatformResolutionPreset resolutionPreset) { return [FCPPlatformMediaSettings makeWithResolutionPreset:resolutionPreset framesPerSecond:nil videoBitrate:nil @@ -17,155 +20,69 @@ enableAudio:YES]; } -FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue) { - return FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil, nil); -} - -FLTCam *FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_t captureSessionQueue, FCPPlatformMediaSettings *mediaSettings, - FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper, - CaptureDeviceFactory captureDeviceFactory) { - if (!mediaSettings) { - mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMedium); - } - - if (!mediaSettingsAVWrapper) { - mediaSettingsAVWrapper = [[FLTCamMediaSettingsAVWrapper alloc] init]; - } - - id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - .andReturn(inputMock); - - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock beginConfiguration]) - .andDo(^(NSInvocation *invocation){ - }); - 
OCMStub([videoSessionMock commitConfiguration]) - .andDo(^(NSInvocation *invocation){ - }); - - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - id audioSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - id frameRateRangeMock1 = OCMClassMock([AVFrameRateRange class]); - OCMStub([frameRateRangeMock1 minFrameRate]).andReturn(3); - OCMStub([frameRateRangeMock1 maxFrameRate]).andReturn(30); - id captureDeviceFormatMock1 = OCMClassMock([AVCaptureDeviceFormat class]); - OCMStub([captureDeviceFormatMock1 videoSupportedFrameRateRanges]).andReturn(@[ - frameRateRangeMock1 - ]); - - id frameRateRangeMock2 = OCMClassMock([AVFrameRateRange class]); - OCMStub([frameRateRangeMock2 minFrameRate]).andReturn(3); - OCMStub([frameRateRangeMock2 maxFrameRate]).andReturn(60); - id captureDeviceFormatMock2 = OCMClassMock([AVCaptureDeviceFormat class]); - OCMStub([captureDeviceFormatMock2 videoSupportedFrameRateRanges]).andReturn(@[ - frameRateRangeMock2 - ]); - - id captureDeviceMock = OCMClassMock([AVCaptureDevice class]); - OCMStub([captureDeviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES); - OCMStub([captureDeviceMock formats]).andReturn((@[ - captureDeviceFormatMock1, captureDeviceFormatMock2 - ])); - __block AVCaptureDeviceFormat *format = captureDeviceFormatMock1; - OCMStub([captureDeviceMock setActiveFormat:[OCMArg any]]).andDo(^(NSInvocation *invocation) { - [invocation retainArguments]; - [invocation getArgument:&format atIndex:2]; - }); - OCMStub([captureDeviceMock activeFormat]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&format]; - }); - - id fltCam = [[FLTCam alloc] initWithMediaSettings:mediaSettings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - 
orientation:UIDeviceOrientationPortrait - videoCaptureSession:videoSessionMock - audioCaptureSession:audioSessionMock - captureSessionQueue:captureSessionQueue - captureDeviceFactory:captureDeviceFactory ?: ^AVCaptureDevice *(void) { - return captureDeviceMock; - } - videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { - return CMVideoFormatDescriptionGetDimensions(format.formatDescription); - } - error:nil]; - - id captureVideoDataOutputMock = [OCMockObject niceMockForClass:[AVCaptureVideoDataOutput class]]; - - OCMStub([captureVideoDataOutputMock new]).andReturn(captureVideoDataOutputMock); - - OCMStub([captureVideoDataOutputMock - recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeMPEG4]) - .andReturn(@{}); - - OCMStub([captureVideoDataOutputMock sampleBufferCallbackQueue]).andReturn(captureSessionQueue); - - id videoMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([videoMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(videoMock); - - id writerInputMock = [OCMockObject niceMockForClass:[AVAssetWriterInput class]]; - - OCMStub([writerInputMock assetWriterInputWithMediaType:AVMediaTypeAudio - outputSettings:[OCMArg any]]) - .andReturn(writerInputMock); - - OCMStub([writerInputMock assetWriterInputWithMediaType:AVMediaTypeVideo - outputSettings:[OCMArg any]]) - .andReturn(writerInputMock); - - return fltCam; -} - -FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession, - FCPPlatformResolutionPreset resolutionPreset) { - id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - .andReturn(inputMock); - - id audioSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - 
return [[FLTCam alloc] initWithCameraName:@"camera" - mediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) - mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] - orientation:UIDeviceOrientationPortrait - videoCaptureSession:captureSession - audioCaptureSession:audioSessionMock - captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) - error:nil]; +FLTCamConfiguration *FLTCreateTestConfiguration(void) { + FCPPlatformMediaSettings *mediaSettings = + FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMedium); + FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper = + [[FLTCamMediaSettingsAVWrapper alloc] init]; + MockAssetWriter *assetWriter = [[MockAssetWriter alloc] init]; + MockPixelBufferAdaptor *pixelBufferAdaptor = [[MockPixelBufferAdaptor alloc] init]; + + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; + videoSessionMock.mockCanSetSessionPreset = YES; + + MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; + audioSessionMock.mockCanSetSessionPreset = YES; + + __block MockCaptureDeviceController *mockDevice = [[MockCaptureDeviceController alloc] init]; + + MockFrameRateRange *frameRateRange1 = [[MockFrameRateRange alloc] initWithMinFrameRate:3 + maxFrameRate:30]; + MockCaptureDeviceFormat *captureDeviceFormatMock1 = [[MockCaptureDeviceFormat alloc] init]; + captureDeviceFormatMock1.videoSupportedFrameRateRanges = @[ frameRateRange1 ]; + + MockFrameRateRange *frameRateRange2 = [[MockFrameRateRange alloc] initWithMinFrameRate:3 + maxFrameRate:60]; + MockCaptureDeviceFormat *captureDeviceFormatMock2 = [[MockCaptureDeviceFormat alloc] init]; + captureDeviceFormatMock2.videoSupportedFrameRateRanges = @[ frameRateRange2 ]; + + MockCaptureInput *inputMock = [[MockCaptureInput alloc] init]; + + mockDevice.formats = @[ captureDeviceFormatMock1, captureDeviceFormatMock2 ]; + mockDevice.activeFormat = captureDeviceFormatMock1; + mockDevice.inputToReturn = inputMock; + + 
FLTCamConfiguration *configuration = + [[FLTCamConfiguration alloc] initWithMediaSettings:mediaSettings + mediaSettingsWrapper:mediaSettingsAVWrapper + captureDeviceFactory:^id(void) { + return mockDevice; + } + captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) + captureSessionFactory:^id _Nonnull { + return videoSessionMock; + } + audioCaptureDeviceFactory:^id _Nonnull { + return mockDevice; + }]; + configuration.capturePhotoOutput = + [[FLTDefaultCapturePhotoOutput alloc] initWithPhotoOutput:[AVCapturePhotoOutput new]]; + configuration.orientation = UIDeviceOrientationPortrait; + configuration.assetWriterFactory = ^id _Nonnull( + NSURL *_Nonnull url, AVFileType _Nonnull fileType, NSError **error) { + return assetWriter; + }; + configuration.pixelBufferAdaptorFactory = ^id _Nonnull( + id _Nonnull input, NSDictionary *_Nullable settings) { + return pixelBufferAdaptor; + }; + configuration.photoSettingsFactory = [[MockCapturePhotoSettingsFactory alloc] init]; + + return configuration; } -FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - AVCaptureSession *captureSession, FCPPlatformResolutionPreset resolutionPreset, - AVCaptureDevice *captureDevice, VideoDimensionsForFormat videoDimensionsForFormat) { - id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - .andReturn(inputMock); - - id audioSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - return [[FLTCam alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) - mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] - orientation:UIDeviceOrientationPortrait - videoCaptureSession:captureSession - audioCaptureSession:audioSessionMock - captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) - 
captureDeviceFactory:^AVCaptureDevice *(void) { - return captureDevice; - } - videoDimensionsForFormat:videoDimensionsForFormat - error:nil]; +FLTCam *FLTCreateCamWithConfiguration(FLTCamConfiguration *configuration) { + return [[FLTCam alloc] initWithConfiguration:configuration error:nil]; } CMSampleBufferRef FLTCreateTestSampleBuffer(void) { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m index d1a835c36efe..d33d660812ed 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m @@ -5,7 +5,6 @@ @import camera_avfoundation; @import XCTest; @import AVFoundation; -#import @interface FLTCam : NSObject #import "CameraTestUtils.h" +#import "MockCaptureDeviceController.h" +#import "MockCapturePhotoOutput.h" /// Includes test cases related to photo capture operations for FLTCam class. 
@interface FLTCamPhotoCaptureTests : XCTestCase - +@property(readonly, nonatomic) FLTCam *cam; +@property(readonly, nonatomic) dispatch_queue_t captureSessionQueue; @end @implementation FLTCamPhotoCaptureTests +- (void)setUp { + _captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); + dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific, + (void *)FLTCaptureSessionQueueSpecific, NULL); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.captureSessionQueue = _captureSessionQueue; + _cam = FLTCreateCamWithConfiguration(configuration); +} + - (void)testCaptureToFile_mustReportErrorToResultIfSavePhotoDelegateCompletionsWithError { XCTestExpectation *errorExpectation = [self expectationWithDescription: @"Must send error to result if save photo delegate completes with error."]; - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettings]).andReturn(settings); - NSError *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil]; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { - FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; + __weak typeof(self) weakSelf = self; + + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id captureDelegate) { + FLTSavePhotoDelegate *delegate = + 
weakSelf.cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(nil, error); }); - }); - cam.capturePhotoOutput = mockOutput; + }; + + _cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. - dispatch_async(captureSessionQueue, ^{ - [cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) { + dispatch_async(_captureSessionQueue, ^{ + [weakSelf.cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) { XCTAssertNil(result); XCTAssertNotNil(error); [errorExpectation fulfill]; @@ -62,32 +69,26 @@ - (void)testCaptureToFile_mustReportPathToResultIfSavePhotoDelegateCompletionsWi [self expectationWithDescription: @"Must send file path to result if save photo delegate completes with file path."]; - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettings]).andReturn(settings); - NSString *filePath = @"test"; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { - FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; + __weak typeof(self) weakSelf = self; + + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id captureDelegate) { + FLTSavePhotoDelegate *delegate = + 
weakSelf.cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(filePath, nil); }); - }); - cam.capturePhotoOutput = mockOutput; + }; + _cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. - dispatch_async(captureSessionQueue, ^{ - [cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) { + dispatch_async(_captureSessionQueue, ^{ + [weakSelf.cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) { XCTAssertEqual(result, filePath); [pathExpectation fulfill]; }]; @@ -99,36 +100,29 @@ - (void)testCaptureToFile_mustReportFileExtensionWithHeifWhenHEVCIsAvailableAndF XCTestExpectation *expectation = [self expectationWithDescription: @"Test must set extension to heif if availablePhotoCodecTypes contains HEVC."]; - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; + [_cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; - AVCapturePhotoSettings *settings = - [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]; + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettingsWithFormat:OCMOCK_ANY]).andReturn(settings); + __weak typeof(self) weakSelf = self; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); // Set availablePhotoCodecTypes to HEVC - NSArray *codecTypes = @[ AVVideoCodecTypeHEVC ]; - OCMStub([mockOutput availablePhotoCodecTypes]).andReturn(codecTypes); - - 
OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { - FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; + mockOutput.availablePhotoCodecTypes = @[ AVVideoCodecTypeHEVC ]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id photoDelegate) { + FLTSavePhotoDelegate *delegate = + weakSelf.cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(delegate.filePath, nil); }); - }); - cam.capturePhotoOutput = mockOutput; + }; + _cam.capturePhotoOutput = mockOutput; + // `FLTCam::captureToFile` runs on capture session queue. - dispatch_async(captureSessionQueue, ^{ - [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { + dispatch_async(_captureSessionQueue, ^{ + [weakSelf.cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { XCTAssertEqualObjects([filePath pathExtension], @"heif"); [expectation fulfill]; }]; @@ -140,74 +134,72 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF XCTestExpectation *expectation = [self expectationWithDescription: @"Test must set extension to jpg if availablePhotoCodecTypes does not contain HEVC."]; - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; - - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettings]).andReturn(settings); + [_cam 
setImageFileFormat:FCPPlatformImageFileFormatHeif]; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); + __weak typeof(self) weakSelf = self; - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { - FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id photoDelegate) { + FLTSavePhotoDelegate *delegate = + weakSelf.cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(delegate.filePath, nil); }); - }); - cam.capturePhotoOutput = mockOutput; + }; + + _cam.capturePhotoOutput = mockOutput; + // `FLTCam::captureToFile` runs on capture session queue. - dispatch_async(captureSessionQueue, ^{ - [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { + dispatch_async(_captureSessionQueue, ^{ + [weakSelf.cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { XCTAssertEqualObjects([filePath pathExtension], @"jpg"); [expectation fulfill]; }]; }); [self waitForExpectationsWithTimeout:1 handler:nil]; } - +// - (void)testCaptureToFile_handlesTorchMode { XCTestExpectation *pathExpectation = [self expectationWithDescription: @"Must send file path to result if save photo delegate completes with file path."]; + XCTestExpectation *setTorchExpectation = + [self expectationWithDescription:@"Should set torch mode to AVCaptureTorchModeOn."]; - id captureDeviceMock = OCMClassMock([AVCaptureDevice class]); - OCMStub([captureDeviceMock hasTorch]).andReturn(YES); - OCMStub([captureDeviceMock isTorchAvailable]).andReturn(YES); - OCMStub([captureDeviceMock torchMode]).andReturn(AVCaptureTorchModeAuto); - OCMExpect([captureDeviceMock 
setTorchMode:AVCaptureTorchModeOn]); + MockCaptureDeviceController *captureDeviceMock = [[MockCaptureDeviceController alloc] init]; + captureDeviceMock.hasTorch = YES; + captureDeviceMock.isTorchAvailable = YES; + captureDeviceMock.torchMode = AVCaptureTorchModeAuto; + captureDeviceMock.setTorchModeStub = ^(AVCaptureTorchMode mode) { + [setTorchExpectation fulfill]; + }; dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil, - ^AVCaptureDevice *(void) { - return captureDeviceMock; - }); - - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettings]).andReturn(settings); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.captureSessionQueue = captureSessionQueue; + configuration.captureDeviceFactory = ^id(void) { + return captureDeviceMock; + }; + FLTCam *cam = FLTCreateCamWithConfiguration(configuration); NSString *filePath = @"test"; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id photoDelegate) { FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(filePath, nil); }); - }); + }; cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. 
@@ -221,6 +213,5 @@ - (void)testCaptureToFile_handlesTorchMode { }]; }); [self waitForExpectationsWithTimeout:1 handler:nil]; - OCMVerifyAll(captureDeviceMock); } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m index b6b78f2dab28..72aaecddfc4d 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m @@ -8,32 +8,119 @@ #endif @import AVFoundation; @import XCTest; -#import #import "CameraTestUtils.h" +#import "MockAssetWriter.h" +#import "MockCaptureConnection.h" + +@import camera_avfoundation; +@import AVFoundation; + +@interface FakeMediaSettingsAVWrapper : FLTCamMediaSettingsAVWrapper +@property(readonly, nonatomic) MockAssetWriterInput *inputMock; +@end + +@implementation FakeMediaSettingsAVWrapper +- (instancetype)initWithInputMock:(MockAssetWriterInput *)inputMock { + _inputMock = inputMock; + return self; +} + +- (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError **)outError { + return YES; +} + +- (void)unlockDevice:(AVCaptureDevice *)captureDevice { +} + +- (void)beginConfigurationForSession:(id)videoCaptureSession { +} + +- (void)commitConfigurationForSession:(id)videoCaptureSession { +} + +- (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { +} + +- (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { +} + +- (id)assetWriterAudioInputWithOutputSettings: + (nullable NSDictionary *)outputSettings { + return _inputMock; +} + +- (id)assetWriterVideoInputWithOutputSettings: + (nullable NSDictionary *)outputSettings { + return _inputMock; +} + +- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer { +} + +- (NSDictionary *) + 
recommendedVideoSettingsForAssetWriterWithFileType:(AVFileType)fileType + forOutput:(AVCaptureVideoDataOutput *)output { + return @{}; +} +@end /// Includes test cases related to sample buffer handling for FLTCam class. @interface FLTCamSampleBufferTests : XCTestCase - +@property(readonly, nonatomic) dispatch_queue_t captureSessionQueue; +@property(readonly, nonatomic) FLTCam *camera; +@property(readonly, nonatomic) MockAssetWriter *writerMock; +@property(readonly, nonatomic) MockCaptureConnection *connectionMock; +@property(readonly, nonatomic) MockAssetWriterInput *inputMock; +@property(readonly, nonatomic) MockPixelBufferAdaptor *adaptorMock; +@property(readonly, nonatomic) FakeMediaSettingsAVWrapper *mediaSettingsWrapper; @end @implementation FLTCamSampleBufferTests +- (void)setUp { + _captureSessionQueue = dispatch_queue_create("testing", NULL); + _writerMock = [[MockAssetWriter alloc] init]; + _connectionMock = [[MockCaptureConnection alloc] init]; + _inputMock = [[MockAssetWriterInput alloc] init]; + _adaptorMock = [[MockPixelBufferAdaptor alloc] init]; + _mediaSettingsWrapper = [[FakeMediaSettingsAVWrapper alloc] initWithInputMock:_inputMock]; + + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.captureSessionQueue = _captureSessionQueue; + configuration.mediaSettings = + [FCPPlatformMediaSettings makeWithResolutionPreset:FCPPlatformResolutionPresetMedium + framesPerSecond:nil + videoBitrate:nil + audioBitrate:nil + enableAudio:YES]; + configuration.mediaSettingsWrapper = _mediaSettingsWrapper; + + __weak typeof(self) weakSelf = self; + configuration.assetWriterFactory = + ^id _Nonnull(NSURL *url, AVFileType fileType, NSError **error) { + return weakSelf.writerMock; + }; + configuration.pixelBufferAdaptorFactory = ^id _Nonnull( + id input, NSDictionary *settings) { + return weakSelf.adaptorMock; + }; + + _camera = FLTCreateCamWithConfiguration(configuration); +} + - 
(void)testSampleBufferCallbackQueueMustBeCaptureSessionQueue { - dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - XCTAssertEqual(captureSessionQueue, cam.captureVideoOutput.sampleBufferCallbackQueue, + XCTAssertEqual(_captureSessionQueue, _camera.captureVideoOutput.sampleBufferCallbackQueue, @"Sample buffer callback queue must be the capture session queue."); } - (void)testCopyPixelBuffer { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("test", NULL)); CMSampleBufferRef capturedSampleBuffer = FLTCreateTestSampleBuffer(); CVPixelBufferRef capturedPixelBuffer = CMSampleBufferGetImageBuffer(capturedSampleBuffer); // Mimic sample buffer callback when captured a new video sample - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:capturedSampleBuffer - fromConnection:OCMClassMock([AVCaptureConnection class])]; - CVPixelBufferRef deliveriedPixelBuffer = [cam copyPixelBuffer]; + fromConnection:_connectionMock]; + CVPixelBufferRef deliveriedPixelBuffer = [_camera copyPixelBuffer]; XCTAssertEqual(deliveriedPixelBuffer, capturedPixelBuffer, @"FLTCam must deliver the latest captured pixel buffer to copyPixelBuffer API."); CFRelease(capturedSampleBuffer); @@ -41,32 +128,19 @@ - (void)testCopyPixelBuffer { } - (void)testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPauseResumeRecording { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("test", NULL)); CMSampleBufferRef sampleBuffer = FLTCreateTestSampleBuffer(); - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown; - OCMStub([writerMock 
startWriting]).andDo(^(NSInvocation *invocation) { - status = AVAssetWriterStatusWriting; - }); - OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&status]; - }); - // Pause then resume the recording. - [cam + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; - [cam pauseVideoRecording]; - [cam resumeVideoRecording]; + [_camera pauseVideoRecording]; + [_camera resumeVideoRecording]; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:sampleBuffer - fromConnection:OCMClassMock([AVCaptureConnection class])]; + fromConnection:_connectionMock]; XCTAssertEqual(CFGetRetainCount(sampleBuffer), 1, @"didOutputSampleBuffer must not change the sample buffer retain count after " @"pause resume recording."); @@ -74,55 +148,33 @@ - (void)testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPause } - (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer(); - id connectionMock = OCMClassMock([AVCaptureConnection class]); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { - status = AVAssetWriterStatusWriting; - }); - OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&status]; - }); - __block NSArray *writtenSamples = @[]; - id adaptorMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - 
OCMStub([adaptorMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(adaptorMock); - OCMStub([adaptorMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - writtenSamples = [writtenSamples arrayByAddingObject:@"video"]; - }); - - id inputMock = OCMClassMock([AVAssetWriterInput class]); - OCMStub([inputMock assetWriterInputWithMediaType:OCMOCK_ANY outputSettings:OCMOCK_ANY]) - .andReturn(inputMock); - OCMStub([inputMock isReadyForMoreMediaData]).andReturn(YES); - OCMStub([inputMock appendSampleBuffer:[OCMArg anyPointer]]).andDo(^(NSInvocation *invocation) { + _adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + writtenSamples = [writtenSamples arrayByAddingObject:@"video"]; + return YES; + }; + + _inputMock.isReadyForMoreMediaData = YES; + _inputMock.appendSampleBufferStub = ^BOOL(CMSampleBufferRef buffer) { writtenSamples = [writtenSamples arrayByAddingObject:@"audio"]; - }); + return YES; + }; - [cam + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; + fromConnection:_connectionMock]; + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; NSArray 
*expectedSamples = @[ @"video", @"audio" ]; XCTAssertEqualObjects(writtenSamples, expectedSamples, @"First appended sample must be video."); @@ -132,67 +184,41 @@ - (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples { } - (void)testDidOutputSampleBufferSampleTimesMustBeNumericAfterPauseResume { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer(); - id connectionMock = OCMClassMock([AVCaptureConnection class]); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { - status = AVAssetWriterStatusWriting; - }); - OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&status]; - }); - __block BOOL videoAppended = NO; - id adaptorMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([adaptorMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(adaptorMock); - OCMStub([adaptorMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - CMTime presentationTime; - [invocation getArgument:&presentationTime atIndex:3]; - XCTAssert(CMTIME_IS_NUMERIC(presentationTime)); - videoAppended = YES; - }); + _adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + XCTAssert(CMTIME_IS_NUMERIC(time)); + videoAppended = YES; + return YES; + }; __block BOOL audioAppended = NO; - id inputMock = OCMClassMock([AVAssetWriterInput class]); - OCMStub([inputMock 
assetWriterInputWithMediaType:OCMOCK_ANY outputSettings:OCMOCK_ANY]) - .andReturn(inputMock); - OCMStub([inputMock isReadyForMoreMediaData]).andReturn(YES); - OCMStub([inputMock appendSampleBuffer:[OCMArg anyPointer]]).andDo(^(NSInvocation *invocation) { - CMSampleBufferRef sampleBuffer; - [invocation getArgument:&sampleBuffer atIndex:2]; - CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + _inputMock.isReadyForMoreMediaData = YES; + _inputMock.appendSampleBufferStub = ^BOOL(CMSampleBufferRef buffer) { + CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(buffer); XCTAssert(CMTIME_IS_NUMERIC(sampleTime)); audioAppended = YES; - }); + return YES; + }; - [cam + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; - [cam pauseVideoRecording]; - [cam resumeVideoRecording]; + [_camera pauseVideoRecording]; + [_camera resumeVideoRecording]; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; - [cam captureOutput:cam.captureVideoOutput + fromConnection:_connectionMock]; + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; + fromConnection:_connectionMock]; + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; XCTAssert(videoAppended && audioAppended, @"Video or audio was not appended."); CFRelease(videoSample); @@ -200,139 +226,90 @@ - (void)testDidOutputSampleBufferSampleTimesMustBeNumericAfterPauseResume { } - (void)testDidOutputSampleBufferMustNotAppendSampleWhenReadyForMoreMediaDataIsNo { - 
FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); - id connectionMock = OCMClassMock([AVCaptureConnection class]); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block BOOL sampleAppended = NO; - id adaptorMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([adaptorMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(adaptorMock); - OCMStub([adaptorMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - sampleAppended = YES; - }); - - __block BOOL readyForMoreMediaData = NO; - id inputMock = OCMClassMock([AVAssetWriterInput class]); - OCMStub([inputMock assetWriterInputWithMediaType:OCMOCK_ANY outputSettings:OCMOCK_ANY]) - .andReturn(inputMock); - OCMStub([inputMock isReadyForMoreMediaData]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&readyForMoreMediaData]; - }); - - [cam + _adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + sampleAppended = YES; + return YES; + }; + + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; - readyForMoreMediaData = YES; + _inputMock.isReadyForMoreMediaData = YES; sampleAppended = NO; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; + fromConnection:_connectionMock]; XCTAssertTrue(sampleAppended, @"Sample was not appended."); - readyForMoreMediaData = NO; + _inputMock.isReadyForMoreMediaData = NO; sampleAppended = NO; - [cam 
captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; + fromConnection:_connectionMock]; XCTAssertFalse(sampleAppended, @"Sample cannot be appended when readyForMoreMediaData is NO."); CFRelease(videoSample); } - (void)testStopVideoRecordingWithCompletionMustCallCompletion { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { - status = AVAssetWriterStatusWriting; - }); - OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&status]; - }); - - OCMStub([writerMock finishWritingWithCompletionHandler:[OCMArg checkWithBlock:^(id param) { - XCTAssert(status == AVAssetWriterStatusWriting, - @"Cannot call finishWritingWithCompletionHandler when status is " - @"not AVAssetWriterStatusWriting."); - void (^handler)(void) = param; - handler(); - return YES; - }]]); - - [cam + __weak MockAssetWriter *weakWriter = _writerMock; + _writerMock.finishWritingStub = ^(void (^param)(void)) { + XCTAssert(weakWriter.status == AVAssetWriterStatusWriting, + @"Cannot call finishWritingWithCompletionHandler when status is " + @"not AVAssetWriterStatusWriting."); + void (^handler)(void) = param; + handler(); + }; + + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; __block BOOL completionCalled = NO; - [cam stopVideoRecordingWithCompletion:^(NSString *_Nullable path, FlutterError *_Nullable error) { - completionCalled = YES; - }]; + [_camera + stopVideoRecordingWithCompletion:^(NSString 
*_Nullable path, FlutterError *_Nullable error) { + completionCalled = YES; + }]; XCTAssert(completionCalled, @"Completion was not called."); } - (void)testStartWritingShouldNotBeCalledBetweenSampleCreationAndAppending { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); - id connectionMock = OCMClassMock([AVCaptureConnection class]); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); __block BOOL startWritingCalled = NO; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { + _writerMock.startWritingStub = ^{ startWritingCalled = YES; - }); + }; __block BOOL videoAppended = NO; - id adaptorMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([adaptorMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(adaptorMock); - OCMStub([adaptorMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - videoAppended = YES; - }); - - id inputMock = OCMClassMock([AVAssetWriterInput class]); - OCMStub([inputMock assetWriterInputWithMediaType:OCMOCK_ANY outputSettings:OCMOCK_ANY]) - .andReturn(inputMock); - OCMStub([inputMock isReadyForMoreMediaData]).andReturn(YES); - - [cam + _adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + videoAppended = YES; + return YES; + }; + + _inputMock.isReadyForMoreMediaData = YES; + + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; BOOL startWritingCalledBefore = startWritingCalled; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput 
didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; + fromConnection:_connectionMock]; XCTAssert((startWritingCalledBefore && videoAppended) || (startWritingCalled && !videoAppended), @"The startWriting was called between sample creation and appending."); - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; + fromConnection:_connectionMock]; XCTAssert(videoAppended, @"Video was not appended."); CFRelease(videoSample); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m similarity index 52% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m index 02a610affaa5..a2490ba0d774 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m @@ -8,31 +8,60 @@ #endif @import AVFoundation; @import XCTest; -#import + #import "CameraTestUtils.h" +#import "FLTCameraPermissionManager.h" +#import "FLTPermissionService.h" + +@interface MockPermissionService : NSObject +@property(nonatomic, copy) AVAuthorizationStatus (^authorizationStatusStub)(AVMediaType mediaType); +@property(nonatomic, copy) void (^requestAccessStub)(AVMediaType mediaType, void (^handler)(BOOL)); +@end + +@implementation MockPermissionService +- (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType { + return self.authorizationStatusStub ? 
self.authorizationStatusStub(mediaType) + : AVAuthorizationStatusNotDetermined; +} -@interface CameraPermissionTests : XCTestCase +- (void)requestAccessForMediaType:(AVMediaType)mediaType completionHandler:(void (^)(BOOL))handler { + if (self.requestAccessStub) { + self.requestAccessStub(mediaType, handler); + } +} +@end +@interface FLTCameraPermissionManagerTests : XCTestCase +@property(nonatomic, strong) FLTCameraPermissionManager *permissionManager; +@property(nonatomic, strong) MockPermissionService *mockService; @end -@implementation CameraPermissionTests +@implementation FLTCameraPermissionManagerTests + +- (void)setUp { + [super setUp]; + self.mockService = [[MockPermissionService alloc] init]; + self.permissionManager = + [[FLTCameraPermissionManager alloc] initWithPermissionService:self.mockService]; +} #pragma mark - camera permissions -- (void)testRequestCameraPermission_completeWithoutErrorIfPrevoiuslyAuthorized { +- (void)testRequestCameraPermission_completeWithoutErrorIfPreviouslyAuthorized { XCTestExpectation *expectation = [self expectationWithDescription: @"Must copmlete without error if camera access was previously authorized."]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusAuthorized); + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusAuthorized; + }; - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } - (void)testRequestCameraPermission_completeWithErrorIfPreviouslyDenied { @@ -45,14 +74,16 @@ - (void)testRequestCameraPermission_completeWithErrorIfPreviouslyDenied { @"Settings 
to enable camera access." details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusDenied); - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusDenied; + }; + + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -63,15 +94,16 @@ - (void)testRequestCameraPermission_completeWithErrorIfRestricted { message:@"Camera access is restricted. " details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusRestricted); + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusRestricted; + }; - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -79,21 +111,22 @@ - (void)testRequestCameraPermission_completeWithoutErrorIfUserGrantAccess { XCTestExpectation *grantedExpectation = [self expectationWithDescription:@"Must complete without error if user choose to grant access"]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusNotDetermined); + self.mockService.authorizationStatusStub = 
^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusNotDetermined; + }; + // Mimic user choosing "allow" in permission dialog. - OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeVideo - completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) { - block(YES); - return YES; - }]]); + self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + handler(YES); + }; - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [grantedExpectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -105,21 +138,22 @@ - (void)testRequestCameraPermission_completeWithErrorIfUserDenyAccess { message:@"User denied the camera access request." details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusNotDetermined); + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusNotDetermined; + }; - // Mimic user choosing "deny" in permission dialog. - OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeVideo - completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) { - block(NO); - return YES; - }]]); - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + // Mimic user choosing "deny" in permission dialog. 
+ self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + handler(NO); + }; + + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -131,17 +165,19 @@ - (void)testRequestAudioPermission_completeWithoutErrorIfPrevoiuslyAuthorized { [self expectationWithDescription: @"Must copmlete without error if audio access was previously authorized."]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusAuthorized); + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusAuthorized; + }; - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } + - (void)testRequestAudioPermission_completeWithErrorIfPreviouslyDenied { XCTestExpectation *expectation = [self expectationWithDescription: @@ -152,14 +188,16 @@ - (void)testRequestAudioPermission_completeWithErrorIfPreviouslyDenied { @"Settings to enable audio access." 
details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusDenied); - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusDenied; + }; + + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -170,15 +208,16 @@ - (void)testRequestAudioPermission_completeWithErrorIfRestricted { message:@"Audio access is restricted. " details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusRestricted); + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusRestricted; + }; - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -186,21 +225,22 @@ - (void)testRequestAudioPermission_completeWithoutErrorIfUserGrantAccess { XCTestExpectation *grantedExpectation = [self expectationWithDescription:@"Must complete without error if user choose to grant access"]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusNotDetermined); + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + 
XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusNotDetermined; + }; + // Mimic user choosing "allow" in permission dialog. - OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeAudio - completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) { - block(YES); - return YES; - }]]); + self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + handler(YES); + }; - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [grantedExpectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -211,22 +251,22 @@ - (void)testRequestAudioPermission_completeWithErrorIfUserDenyAccess { message:@"User denied the audio access request." details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusNotDetermined); + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusNotDetermined; + }; // Mimic user choosing "deny" in permission dialog. 
- OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeAudio - completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) { - block(NO); - return YES; - }]]); - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + handler(NO); + }; + + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); - + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m index c92d824e4696..413f31b69bfe 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m @@ -8,7 +8,8 @@ #endif @import AVFoundation; @import XCTest; -#import + +#import "MockPhotoData.h" @interface FLTSavePhotoDelegateTests : XCTestCase @@ -32,7 +33,7 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithErrorIfFailedToCapture { }]; [delegate handlePhotoCaptureResultWithError:captureError - photoDataProvider:^NSData * { + photoDataProvider:^id { return nil; }]; [self waitForExpectationsWithTimeout:1 handler:nil]; @@ -55,15 +56,14 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithErrorIfFailedToWrite { [completionExpectation fulfill]; }]; - // Do not use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g. - // `XCTRunnerIDESession::logDebugMessage:`) on a private queue. 
- id mockData = OCMPartialMock([NSData data]); - OCMStub([mockData writeToFile:OCMOCK_ANY - options:NSDataWritingAtomic - error:[OCMArg setTo:ioError]]) - .andReturn(NO); + MockPhotoData *mockData = [[MockPhotoData alloc] init]; + mockData.writeToFileStub = ^BOOL(NSString *path, NSDataWritingOptions options, NSError **error) { + *error = ioError; + return NO; + }; + [delegate handlePhotoCaptureResultWithError:nil - photoDataProvider:^NSData * { + photoDataProvider:^id { return mockData; }]; [self waitForExpectationsWithTimeout:1 handler:nil]; @@ -84,14 +84,13 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithFilePathIfSuccessToWrite { [completionExpectation fulfill]; }]; - // Do not use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g. - // `XCTRunnerIDESession::logDebugMessage:`) on a private queue. - id mockData = OCMPartialMock([NSData data]); - OCMStub([mockData writeToFile:filePath options:NSDataWritingAtomic error:[OCMArg setTo:nil]]) - .andReturn(YES); + MockPhotoData *mockData = [[MockPhotoData alloc] init]; + mockData.writeToFileStub = ^BOOL(NSString *path, NSDataWritingOptions options, NSError **error) { + return YES; + }; [delegate handlePhotoCaptureResultWithError:nil - photoDataProvider:^NSData * { + photoDataProvider:^id { return mockData; }]; [self waitForExpectationsWithTimeout:1 handler:nil]; @@ -109,16 +108,13 @@ - (void)testHandlePhotoCaptureResult_bothProvideDataAndSaveFileMustRunOnIOQueue const char *ioQueueSpecific = "io_queue_specific"; dispatch_queue_set_specific(ioQueue, ioQueueSpecific, (void *)ioQueueSpecific, NULL); - // Do not use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g. - // `XCTRunnerIDESession::logDebugMessage:`) on a private queue. 
- id mockData = OCMPartialMock([NSData data]); - OCMStub([mockData writeToFile:OCMOCK_ANY options:NSDataWritingAtomic error:[OCMArg setTo:nil]]) - .andDo(^(NSInvocation *invocation) { - if (dispatch_get_specific(ioQueueSpecific)) { - [writeFileQueueExpectation fulfill]; - } - }) - .andReturn(YES); + MockPhotoData *mockData = [[MockPhotoData alloc] init]; + mockData.writeToFileStub = ^BOOL(NSString *path, NSDataWritingOptions options, NSError **error) { + if (dispatch_get_specific(ioQueueSpecific)) { + [writeFileQueueExpectation fulfill]; + } + return YES; + }; NSString *filePath = @"test"; FLTSavePhotoDelegate *delegate = [[FLTSavePhotoDelegate alloc] @@ -129,7 +125,7 @@ - (void)testHandlePhotoCaptureResult_bothProvideDataAndSaveFileMustRunOnIOQueue }]; [delegate handlePhotoCaptureResultWithError:nil - photoDataProvider:^NSData * { + photoDataProvider:^id { if (dispatch_get_specific(ioQueueSpecific)) { [dataProviderQueueExpectation fulfill]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.h new file mode 100644 index 000000000000..1f9e8478a966 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.h @@ -0,0 +1,25 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +@interface MockAssetWriter : NSObject +@property(nonatomic, assign) AVAssetWriterStatus status; +@property(nonatomic, copy) void (^getStatusStub)(void); +@property(nonatomic, copy) void (^startWritingStub)(void); +@property(nonatomic, copy) void (^finishWritingStub)(void (^)(void)); +@property(nonatomic, strong) NSError *error; +@end + +@interface MockAssetWriterInput : NSObject +@property(nonatomic, strong) AVAssetWriterInput *input; +@property(nonatomic, assign) BOOL isReadyForMoreMediaData; +@property(nonatomic, assign) BOOL expectsMediaDataInRealTime; +@property(nonatomic, copy) BOOL (^appendSampleBufferStub)(CMSampleBufferRef); +@end + +@interface MockPixelBufferAdaptor : NSObject +@property(nonatomic, copy) BOOL (^appendPixelBufferStub)(CVPixelBufferRef, CMTime); +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.m new file mode 100644 index 000000000000..0d537c00f57a --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.m @@ -0,0 +1,49 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "MockAssetWriter.h" + +@implementation MockAssetWriter +- (BOOL)startWriting { + if (self.startWritingStub) { + self.startWritingStub(); + } + self.status = AVAssetWriterStatusWriting; + return YES; +} + +- (void)finishWritingWithCompletionHandler:(void (^)(void))handler { + if (self.finishWritingStub) { + self.finishWritingStub(handler); + } else if (handler) { + handler(); + } +} + +- (void)startSessionAtSourceTime:(CMTime)startTime { +} + +- (void)addInput:(nonnull AVAssetWriterInput *)input { +} + +@end + +@implementation MockAssetWriterInput +- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer { + if (self.appendSampleBufferStub) { + return self.appendSampleBufferStub(sampleBuffer); + } + return YES; +} +@end + +@implementation MockPixelBufferAdaptor +- (BOOL)appendPixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer + withPresentationTime:(CMTime)presentationTime { + if (self.appendPixelBufferStub) { + return self.appendPixelBufferStub(pixelBuffer, presentationTime); + } + return YES; +} +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.h new file mode 100644 index 000000000000..88dfaf914b93 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.h @@ -0,0 +1,17 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockCameraDeviceDiscovery : NSObject +@property(nonatomic, copy) + NSArray> *_Nullable (^discoverySessionStub) + (NSArray *deviceTypes, AVMediaType mediaType, + AVCaptureDevicePosition position); +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.m new file mode 100644 index 000000000000..19322e3e0f1a --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.m @@ -0,0 +1,19 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockCameraDeviceDiscovery.h" + +@implementation MockCameraDeviceDiscovery + +- (NSArray> *) + discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position { + if (self.discoverySessionStub) { + return self.discoverySessionStub(deviceTypes, mediaType, position); + } + return @[]; +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.h new file mode 100644 index 000000000000..2791cc2db1a9 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.h @@ -0,0 +1,19 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockCaptureConnection : NSObject +@property(nonatomic, strong) AVCaptureConnection *connection; +@property(nonatomic, assign) BOOL videoMirrored; +@property(nonatomic, assign) AVCaptureVideoOrientation videoOrientation; +@property(nonatomic, strong) NSArray *inputPorts; +@property(nonatomic, assign) BOOL isVideoMirroringSupported; +@property(nonatomic, assign) BOOL isVideoOrientationSupported; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.m new file mode 100644 index 000000000000..dc8d423c8e21 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.m @@ -0,0 +1,19 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockCaptureConnection.h" + +@implementation MockCaptureConnection { + NSArray *_inputPorts; +} + +- (NSArray *)inputPorts { + return _inputPorts; +} + +- (void)setInputPorts:(NSArray *)inputPorts { + _inputPorts = [inputPorts copy]; +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.h new file mode 100644 index 000000000000..8e196bb2501c --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.h @@ -0,0 +1,95 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockCaptureDeviceController : NSObject +@property(nonatomic, assign) NSString *uniqueID; + +// Position/Orientation +@property(nonatomic, assign) AVCaptureDevicePosition position; + +// Format/Configuration +@property(nonatomic, strong) id activeFormat; +@property(nonatomic, strong) NSArray> *formats; +@property(nonatomic, copy) void (^setActiveFormatStub)(id format); + +// Flash/Torch +@property(nonatomic, assign) BOOL hasFlash; +@property(nonatomic, assign) BOOL hasTorch; +@property(nonatomic, assign) BOOL isTorchAvailable; +@property(nonatomic, assign) AVCaptureTorchMode torchMode; +@property(nonatomic, copy) void (^setTorchModeStub)(AVCaptureTorchMode mode); +@property(nonatomic, assign) BOOL flashModeSupported; + +// Focus +@property(nonatomic, assign) BOOL isFocusPointOfInterestSupported; +@property(nonatomic, copy) BOOL (^isFocusModeSupportedStub)(AVCaptureFocusMode mode); +@property(nonatomic, assign) AVCaptureFocusMode focusMode; +@property(nonatomic, copy) void (^setFocusModeStub)(AVCaptureFocusMode mode); +@property(nonatomic, assign) CGPoint focusPointOfInterest; +@property(nonatomic, copy) void (^setFocusPointOfInterestStub)(CGPoint point); + +// Exposure +@property(nonatomic, assign) BOOL isExposurePointOfInterestSupported; +@property(nonatomic, assign) AVCaptureExposureMode exposureMode; +@property(nonatomic, assign) BOOL exposureModeSupported; +@property(nonatomic, copy) void (^setExposureModeStub)(AVCaptureExposureMode mode); +@property(nonatomic, assign) CGPoint exposurePointOfInterest; +@property(nonatomic, copy) void (^setExposurePointOfInterestStub)(CGPoint point); +@property(nonatomic, assign) float minExposureTargetBias; +@property(nonatomic, assign) float maxExposureTargetBias; +@property(nonatomic, copy) void (^setExposureTargetBiasStub) + (float bias, void (^_Nullable handler)(CMTime)); + +// Zoom +@property(nonatomic, assign) float 
maxAvailableVideoZoomFactor; +@property(nonatomic, assign) float minAvailableVideoZoomFactor; +@property(nonatomic, assign) float videoZoomFactor; +@property(nonatomic, copy) void (^setVideoZoomFactorStub)(float factor); + +// Camera Properties +@property(nonatomic, assign) float lensAperture; +@property(nonatomic, assign) CMTime exposureDuration; +@property(nonatomic, assign) float ISO; + +// Configuration Lock +@property(nonatomic, assign) BOOL shouldFailConfiguration; +@property(nonatomic, copy) void (^lockForConfigurationStub)(NSError **error); +@property(nonatomic, copy) void (^unlockForConfigurationStub)(void); + +// Frame Duration +@property(nonatomic, assign) CMTime activeVideoMinFrameDuration; +@property(nonatomic, assign) CMTime activeVideoMaxFrameDuration; +@property(nonatomic, copy) void (^setActiveVideoMinFrameDurationStub)(CMTime duration); +@property(nonatomic, copy) void (^setActiveVideoMaxFrameDurationStub)(CMTime duration); + +// Input Creation +@property(nonatomic, strong) id inputToReturn; +@property(nonatomic, copy) void (^createInputStub)(NSError **error); + +@end + +@interface MockCaptureDeviceFormat : NSObject +@property(nonatomic, strong) NSArray> *videoSupportedFrameRateRanges; +@property(nonatomic, assign) CMFormatDescriptionRef formatDescription; +@property(nonatomic, strong) AVCaptureDeviceFormat *format; + +- (instancetype)initWithDimensions:(CMVideoDimensions)dimensions; +@end + +@interface MockFrameRateRange : NSObject +- (instancetype)initWithMinFrameRate:(float)minFrameRate maxFrameRate:(float)maxFrameRate; +@property(nonatomic, readwrite) float minFrameRate; +@property(nonatomic, readwrite) float maxFrameRate; +@end + +@interface MockCaptureInput : NSObject +@property(nonatomic, strong) NSArray *ports; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.m 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.m new file mode 100644 index 000000000000..92a6036b15eb --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.m @@ -0,0 +1,161 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +#import "MockCaptureDeviceController.h" + +@implementation MockCaptureDeviceController +- (void)setActiveFormat:(id)format { + _activeFormat = format; + if (self.setActiveFormatStub) { + self.setActiveFormatStub(format); + } +} + +- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode { + return self.flashModeSupported; +} + +- (void)setTorchMode:(AVCaptureTorchMode)mode { + _torchMode = mode; + if (self.setTorchModeStub) { + self.setTorchModeStub(mode); + } +} + +- (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { + if (self.isFocusModeSupportedStub) { + return self.isFocusModeSupportedStub(mode); + } + return NO; +} + +- (void)setFocusMode:(AVCaptureFocusMode)mode { + _focusMode = mode; + if (self.setFocusModeStub) { + self.setFocusModeStub(mode); + } +} + +- (void)setFocusPointOfInterest:(CGPoint)point { + _focusPointOfInterest = point; + if (self.setFocusPointOfInterestStub) { + self.setFocusPointOfInterestStub(point); + } +} + +- (void)setExposureMode:(AVCaptureExposureMode)mode { + _exposureMode = mode; + if (self.setExposureModeStub) { + self.setExposureModeStub(mode); + } +} + +- (void)setExposurePointOfInterest:(CGPoint)point { + _exposurePointOfInterest = point; + if (self.setExposurePointOfInterestStub) { + self.setExposurePointOfInterestStub(point); + } +} + +- (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime))handler { + if (self.setExposureTargetBiasStub) { + self.setExposureTargetBiasStub(bias, handler); + } else if 
(handler) { + handler(kCMTimeZero); + } +} + +- (void)setVideoZoomFactor:(float)factor { + _videoZoomFactor = factor; + if (self.setVideoZoomFactorStub) { + self.setVideoZoomFactorStub(factor); + } +} + +- (BOOL)lockForConfiguration:(NSError **)error { + if (self.lockForConfigurationStub) { + self.lockForConfigurationStub(error); + return !self.shouldFailConfiguration; + } + if (self.shouldFailConfiguration) { + if (error) { + *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil]; + } + return NO; + } + return YES; +} + +- (void)unlockForConfiguration { + if (self.unlockForConfigurationStub) { + self.unlockForConfigurationStub(); + } +} + +- (void)setActiveVideoMinFrameDuration:(CMTime)duration { + _activeVideoMinFrameDuration = duration; + if (self.setActiveVideoMinFrameDurationStub) { + self.setActiveVideoMinFrameDurationStub(duration); + } +} + +- (void)setActiveVideoMaxFrameDuration:(CMTime)duration { + _activeVideoMaxFrameDuration = duration; + if (self.setActiveVideoMaxFrameDurationStub) { + self.setActiveVideoMaxFrameDurationStub(duration); + } +} + +- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { + return self.exposureModeSupported; +} + +- (id)createInput:(NSError *_Nullable *_Nullable)error { + if (self.createInputStub) { + self.createInputStub(error); + } + return self.inputToReturn; +} + +@end + +@implementation MockCaptureDeviceFormat + +- (void)dealloc { + if (_formatDescription) { + CFRelease(_formatDescription); + } +} + +- (instancetype)initWithDimensions:(CMVideoDimensions)dimensions { + self = [super init]; + if (self) { + CMVideoFormatDescriptionCreate(kCFAllocatorDefault, kCVPixelFormatType_32BGRA, dimensions.width, + dimensions.height, NULL, &_formatDescription); + } + return self; +} + +@end + +@implementation MockFrameRateRange + +- (instancetype)initWithMinFrameRate:(float)minFrameRate maxFrameRate:(float)maxFrameRate { + self = [super init]; + if (self) { + _minFrameRate = minFrameRate; + _maxFrameRate = 
maxFrameRate; + } + return self; +} + +@end + +@implementation MockCaptureInput +@synthesize ports; +@synthesize input; +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.h new file mode 100644 index 000000000000..1f886bb3a489 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.h @@ -0,0 +1,16 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +@interface MockCapturePhotoOutput : NSObject +@property(nonatomic, copy) void (^capturePhotoWithSettingsStub) + (id, id); +@property(nonatomic, strong) NSArray *availablePhotoCodecTypes; +@property(nonatomic, strong) AVCapturePhotoOutput *photoOutput; +@property(nonatomic, assign, getter=isHighResolutionCaptureEnabled) + BOOL highResolutionCaptureEnabled; +@property(nonatomic, strong) NSArray *supportedFlashModes; +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.m new file mode 100644 index 000000000000..2a1721bee0a8 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.m @@ -0,0 +1,19 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "MockCapturePhotoOutput.h" + +@implementation MockCapturePhotoOutput +- (void)capturePhotoWithSettings:(id)settings + delegate:(id)delegate { + if (self.capturePhotoWithSettingsStub) { + self.capturePhotoWithSettingsStub(settings, delegate); + } +} + +- (nullable AVCaptureConnection *)connectionWithMediaType:(nonnull AVMediaType)mediaType { + return nil; +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h new file mode 100644 index 000000000000..066cb94cb531 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h @@ -0,0 +1,20 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +@interface MockCapturePhotoSettings : NSObject +@property(nonatomic, strong) AVCapturePhotoSettings *settings; +@property(nonatomic, assign) int64_t uniqueID; +@property(nonatomic, copy) NSDictionary *format; +@property(nonatomic, assign) AVCaptureFlashMode flashMode; +@property(nonatomic, assign) BOOL highResolutionPhotoEnabled; +@end + +@interface MockCapturePhotoSettingsFactory : NSObject +@property(nonatomic, copy) id (^createPhotoSettingsStub)(void); +@property(nonatomic, copy) id (^createPhotoSettingsWithFormatStub) + (NSDictionary *); +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m new file mode 100644 index 000000000000..851334265cba --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m @@ -0,0 +1,23 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockCapturePhotoSettings.h" + +@implementation MockCapturePhotoSettings +@end + +@implementation MockCapturePhotoSettingsFactory + +- (id)createPhotoSettings { + return self.createPhotoSettingsStub ? self.createPhotoSettingsStub() + : [[MockCapturePhotoSettings alloc] init]; +} + +- (id)createPhotoSettingsWithFormat: + (NSDictionary *)format { + return self.createPhotoSettingsWithFormatStub ? self.createPhotoSettingsWithFormatStub(format) + : [[MockCapturePhotoSettings alloc] init]; +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.h new file mode 100644 index 000000000000..4fea3a6a1929 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.h @@ -0,0 +1,24 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockCaptureSession : NSObject +@property(nonatomic, copy) void (^beginConfigurationStub)(void); +@property(nonatomic, copy) void (^commitConfigurationStub)(void); +@property(nonatomic, copy) void (^startRunningStub)(void); +@property(nonatomic, copy) void (^stopRunningStub)(void); +@property(nonatomic, copy) void (^setSessionPresetStub)(AVCaptureSessionPreset preset); + +@property(nonatomic, strong) NSMutableArray *inputs; +@property(nonatomic, strong) NSMutableArray *outputs; +@property(nonatomic, assign) BOOL mockCanSetSessionPreset; +@property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; + +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.m new file mode 100644 index 000000000000..9adb54e90d6c --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.m @@ -0,0 +1,85 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "MockCaptureSession.h" + +@implementation MockCaptureSession + +- (instancetype)init { + self = [super init]; + if (self) { + _inputs = [NSMutableArray array]; + _outputs = [NSMutableArray array]; + } + return self; +} + +- (void)beginConfiguration { + if (self.beginConfigurationStub) { + self.beginConfigurationStub(); + } +} + +- (void)commitConfiguration { + if (self.commitConfigurationStub) { + self.commitConfigurationStub(); + } +} + +- (void)startRunning { + if (self.startRunningStub) { + self.startRunningStub(); + } +} + +- (void)stopRunning { + if (self.stopRunningStub) { + self.stopRunningStub(); + } +} + +- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { + return self.mockCanSetSessionPreset; +} + +- (void)addConnection:(nonnull AVCaptureConnection *)connection { +} + +- (void)addInput:(nonnull AVCaptureInput *)input { +} + +- (void)addInputWithNoConnections:(nonnull AVCaptureInput *)input { +} + +- (void)addOutput:(nonnull AVCaptureOutput *)output { +} + +- (void)addOutputWithNoConnections:(nonnull AVCaptureOutput *)output { +} + +- (BOOL)canAddConnection:(nonnull AVCaptureConnection *)connection { + return YES; +} + +- (BOOL)canAddInput:(nonnull AVCaptureInput *)input { + return YES; +} + +- (BOOL)canAddOutput:(nonnull AVCaptureOutput *)output { + return YES; +} + +- (void)removeInput:(nonnull AVCaptureInput *)input { +} + +- (void)removeOutput:(nonnull AVCaptureOutput *)output { +} + +- (void)setSessionPreset:(AVCaptureSessionPreset)sessionPreset { + if (_setSessionPresetStub) { + _setSessionPresetStub(sessionPreset); + } +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.h new file mode 100644 index 000000000000..855bebc998c7 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.h @@ -0,0 +1,14 @@ +// 
Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockDeviceOrientationProvider : NSObject +@property(nonatomic, assign) UIDeviceOrientation orientation; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.m new file mode 100644 index 000000000000..364b5b1fd60b --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.m @@ -0,0 +1,11 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +#import "MockDeviceOrientationProvider.h" + +@implementation MockDeviceOrientationProvider +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.h new file mode 100644 index 000000000000..fe5b34cf7552 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.h @@ -0,0 +1,14 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import Flutter; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockEventChannel : NSObject +@property(nonatomic, copy) void (^setStreamHandlerStub)(NSObject *); +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.m new file mode 100644 index 000000000000..7190cfb17714 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.m @@ -0,0 +1,15 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockEventChannel.h" + +@implementation MockEventChannel + +- (void)setStreamHandler:(NSObject *)handler { + if (self.setStreamHandlerStub) { + self.setStreamHandlerStub(handler); + } +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.h new file mode 100644 index 000000000000..f698b81bfca1 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.h @@ -0,0 +1,11 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +@interface MockPhotoData : NSObject +@property(nonatomic, copy) BOOL (^writeToFileStub) + (NSString *path, NSDataWritingOptions options, NSError **error); +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.m new file mode 100644 index 000000000000..f15512c6f2ea --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.m @@ -0,0 +1,18 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockPhotoData.h" + +@implementation MockPhotoData + +- (BOOL)writeToFile:(NSString *)path + options:(NSDataWritingOptions)writeOptionsMask + error:(NSError **)errorPtr { + if (self.writeToFileStub) { + return _writeToFileStub(path, writeOptionsMask, errorPtr); + } + return YES; +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m index 53c7c1da2c70..6974235bd7b7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m @@ -8,20 +8,69 @@ #endif @import XCTest; @import AVFoundation; -#import #import "CameraTestUtils.h" +@interface MockImageStreamHandler : FLTImageStreamHandler +@property(nonatomic, copy) void (^onEventSinkCalled)(id event); +@end + +@implementation MockImageStreamHandler + +- (FlutterEventSink)eventSink { + if (self.onEventSinkCalled) { + return ^(id event) { + self.onEventSinkCalled(event); + }; + } + return nil; +} + +@end + +@interface MockFlutterBinaryMessenger : NSObject +@end + +@implementation MockFlutterBinaryMessenger +- (void)sendOnChannel:(NSString *)channel 
message:(NSData *)message { +} + +- (void)sendOnChannel:(NSString *)channel + message:(NSData *)message + binaryReply:(FlutterBinaryReply)callback { +} + +- (FlutterBinaryMessengerConnection)setMessageHandlerOnChannel:(NSString *)channel + binaryMessageHandler: + (FlutterBinaryMessageHandler)handler { + return 0; +} + +- (void)cleanUpConnection:(FlutterBinaryMessengerConnection)connection { +} + +- (void)cleanupConnection:(FlutterBinaryMessengerConnection)connection { +} +@end + @interface StreamingTests : XCTestCase @property(readonly, nonatomic) FLTCam *camera; @property(readonly, nonatomic) CMSampleBufferRef sampleBuffer; +@property(readonly, nonatomic) MockImageStreamHandler *mockStreamHandler; +@property(readonly, nonatomic) MockFlutterBinaryMessenger *messengerMock; + @end @implementation StreamingTests - (void)setUp { dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL); - _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); + _mockStreamHandler = + [[MockImageStreamHandler alloc] initWithCaptureSessionQueue:captureSessionQueue]; + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.captureSessionQueue = captureSessionQueue; + _camera = FLTCreateCamWithConfiguration(configuration); _sampleBuffer = FLTCreateTestSampleBuffer(); + _messengerMock = [[MockFlutterBinaryMessenger alloc] init]; } - (void)tearDown { @@ -32,13 +81,11 @@ - (void)testExceedMaxStreamingPendingFramesCount { XCTestExpectation *streamingExpectation = [self expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"]; - id handlerMock = OCMClassMock([FLTImageStreamHandler class]); - OCMStub([handlerMock eventSink]).andReturn(^(id event) { + _mockStreamHandler.onEventSinkCalled = ^(id eventSink) { [streamingExpectation fulfill]; - }); + }; - id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); - [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; + 
[_camera startImageStreamWithMessenger:_messengerMock imageStreamHandler:_mockStreamHandler]; XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages" object:_camera @@ -59,13 +106,11 @@ - (void)testReceivedImageStreamData { [self expectationWithDescription: @"Must be able to call the handler again when receivedImageStreamData is called"]; - id handlerMock = OCMClassMock([FLTImageStreamHandler class]); - OCMStub([handlerMock eventSink]).andReturn(^(id event) { + _mockStreamHandler.onEventSinkCalled = ^(id eventSink) { [streamingExpectation fulfill]; - }); + }; - id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); - [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; + [_camera startImageStreamWithMessenger:_messengerMock imageStreamHandler:_mockStreamHandler]; XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages" object:_camera diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/TestMediaSettingsAVWrapper.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/TestMediaSettingsAVWrapper.h new file mode 100644 index 000000000000..983e00c036af --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/TestMediaSettingsAVWrapper.h @@ -0,0 +1,114 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +/** + * A test implemetation of `FLTCamMediaSettingsAVWrapper` + * + * This xctest-expectation-checking implementation of `FLTCamMediaSettingsAVWrapper` is injected + * into `camera-avfoundation` plugin instead of real AVFoundation-based realization. + * Such kind of Dependency Injection (DI) allows to run media-settings tests without + * any additional mocking of AVFoundation classes. 
+ */ +@interface TestMediaSettingsAVWrapper : FLTCamMediaSettingsAVWrapper +@property(nonatomic, readonly) XCTestExpectation *lockExpectation; +@property(nonatomic, readonly) XCTestExpectation *unlockExpectation; +@property(nonatomic, readonly) XCTestExpectation *minFrameDurationExpectation; +@property(nonatomic, readonly) XCTestExpectation *maxFrameDurationExpectation; +@property(nonatomic, readonly) XCTestExpectation *beginConfigurationExpectation; +@property(nonatomic, readonly) XCTestExpectation *commitConfigurationExpectation; +@property(nonatomic, readonly) XCTestExpectation *audioSettingsExpectation; +@property(nonatomic, readonly) XCTestExpectation *videoSettingsExpectation; +@end + +@implementation TestMediaSettingsAVWrapper + +- (instancetype)initWithTestCase:(XCTestCase *)test { + _lockExpectation = [test expectationWithDescription:@"lockExpectation"]; + _unlockExpectation = [test expectationWithDescription:@"unlockExpectation"]; + _minFrameDurationExpectation = [test expectationWithDescription:@"minFrameDurationExpectation"]; + _maxFrameDurationExpectation = [test expectationWithDescription:@"maxFrameDurationExpectation"]; + _beginConfigurationExpectation = + [test expectationWithDescription:@"beginConfigurationExpectation"]; + _commitConfigurationExpectation = + [test expectationWithDescription:@"commitConfigurationExpectation"]; + _audioSettingsExpectation = [test expectationWithDescription:@"audioSettingsExpectation"]; + _videoSettingsExpectation = [test expectationWithDescription:@"videoSettingsExpectation"]; + + return self; +} + +- (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError **)outError { + [_lockExpectation fulfill]; + return YES; +} + +- (void)unlockDevice:(AVCaptureDevice *)captureDevice { + [_unlockExpectation fulfill]; +} + +- (void)beginConfigurationForSession:(id)videoCaptureSession { + [_beginConfigurationExpectation fulfill]; +} + +- (void)commitConfigurationForSession:(id)videoCaptureSession { + 
[_commitConfigurationExpectation fulfill]; +} + +- (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { + // FLTCam allows to set frame rate with 1/10 precision. + CMTime expectedDuration = CMTimeMake(10, gTestFramesPerSecond * 10); + + if (duration.value == expectedDuration.value && + duration.timescale == expectedDuration.timescale) { + [_minFrameDurationExpectation fulfill]; + } +} + +- (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { + // FLTCam allows to set frame rate with 1/10 precision. + CMTime expectedDuration = CMTimeMake(10, gTestFramesPerSecond * 10); + + if (duration.value == expectedDuration.value && + duration.timescale == expectedDuration.timescale) { + [_maxFrameDurationExpectation fulfill]; + } +} + +- (id)assetWriterAudioInputWithOutputSettings: + (nullable NSDictionary *)outputSettings { + if ([outputSettings[AVEncoderBitRateKey] isEqual:@(gTestAudioBitrate)]) { + [_audioSettingsExpectation fulfill]; + } + + return [[MockAssetWriterInput alloc] init]; +} + +- (id)assetWriterVideoInputWithOutputSettings: + (nullable NSDictionary *)outputSettings { + if ([outputSettings[AVVideoCompressionPropertiesKey] isKindOfClass:[NSMutableDictionary class]]) { + NSDictionary *compressionProperties = outputSettings[AVVideoCompressionPropertiesKey]; + + if ([compressionProperties[AVVideoAverageBitRateKey] isEqual:@(gTestVideoBitrate)] && + [compressionProperties[AVVideoExpectedSourceFrameRateKey] + isEqual:@(gTestFramesPerSecond)]) { + [_videoSettingsExpectation fulfill]; + } + } + + return [[MockAssetWriterInput alloc] init]; +} + +- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer { +} + +- (NSDictionary *) + recommendedVideoSettingsForAssetWriterWithFileType:(AVFileType)fileType + forOutput:(AVCaptureVideoDataOutput *)output { + return @{}; +} + +@end diff --git 
a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m index 169b75ddfbb1..dfd539ecfd3d 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m @@ -7,61 +7,66 @@ @import camera_avfoundation.Test; #endif @import XCTest; -#import + +#import "MockEventChannel.h" @interface ThreadSafeEventChannelTests : XCTestCase +@property(readonly, nonatomic) MockEventChannel *mockEventChannel; +@property(readonly, nonatomic) FLTThreadSafeEventChannel *threadSafeEventChannel; @end @implementation ThreadSafeEventChannelTests -- (void)testSetStreamHandler_shouldStayOnMainThreadIfCalledFromMainThread { - FlutterEventChannel *mockEventChannel = OCMClassMock([FlutterEventChannel class]); - FLTThreadSafeEventChannel *threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:mockEventChannel]; +- (void)setUp { + [super setUp]; + _mockEventChannel = [[MockEventChannel alloc] init]; + _threadSafeEventChannel = + [[FLTThreadSafeEventChannel alloc] initWithEventChannel:_mockEventChannel]; +} +- (void)testSetStreamHandler_shouldStayOnMainThreadIfCalledFromMainThread { XCTestExpectation *mainThreadExpectation = [self expectationWithDescription:@"setStreamHandler must be called on the main thread"]; XCTestExpectation *mainThreadCompletionExpectation = [self expectationWithDescription: @"setStreamHandler's completion block must be called on the main thread"]; - OCMStub([mockEventChannel setStreamHandler:[OCMArg any]]).andDo(^(NSInvocation *invocation) { + + [_mockEventChannel setSetStreamHandlerStub:^(NSObject *handler) { if (NSThread.isMainThread) { [mainThreadExpectation fulfill]; } - }); + }]; - [threadSafeEventChannel setStreamHandler:nil - completion:^{ - if (NSThread.isMainThread) { - 
[mainThreadCompletionExpectation fulfill]; - } - }]; + [_threadSafeEventChannel setStreamHandler:nil + completion:^{ + if (NSThread.isMainThread) { + [mainThreadCompletionExpectation fulfill]; + } + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } - (void)testSetStreamHandler_shouldDispatchToMainThreadIfCalledFromBackgroundThread { - FlutterEventChannel *mockEventChannel = OCMClassMock([FlutterEventChannel class]); - FLTThreadSafeEventChannel *threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:mockEventChannel]; - XCTestExpectation *mainThreadExpectation = [self expectationWithDescription:@"setStreamHandler must be called on the main thread"]; XCTestExpectation *mainThreadCompletionExpectation = [self expectationWithDescription: @"setStreamHandler's completion block must be called on the main thread"]; - OCMStub([mockEventChannel setStreamHandler:[OCMArg any]]).andDo(^(NSInvocation *invocation) { + + [_mockEventChannel setSetStreamHandlerStub:^(NSObject *handler) { if (NSThread.isMainThread) { [mainThreadExpectation fulfill]; } - }); + }]; + __weak typeof(self) weakSelf = self; dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ - [threadSafeEventChannel setStreamHandler:nil - completion:^{ - if (NSThread.isMainThread) { - [mainThreadCompletionExpectation fulfill]; - } - }]; + [weakSelf.threadSafeEventChannel setStreamHandler:nil + completion:^{ + if (NSThread.isMainThread) { + [mainThreadCompletionExpectation fulfill]; + } + }]; }); [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -69,11 +74,13 @@ - (void)testSetStreamHandler_shouldDispatchToMainThreadIfCalledFromBackgroundThr - (void)testEventChannel_shouldBeKeptAliveWhenDispatchingBackToMainThread { XCTestExpectation *expectation = [self expectationWithDescription:@"Completion should be called."]; + + __weak typeof(self) weakSelf = self; dispatch_async(dispatch_queue_create("test", NULL), ^{ - FLTThreadSafeEventChannel *channel = 
[[FLTThreadSafeEventChannel alloc] - initWithEventChannel:OCMClassMock([FlutterEventChannel class])]; + FLTThreadSafeEventChannel *channel = + [[FLTThreadSafeEventChannel alloc] initWithEventChannel:weakSelf.mockEventChannel]; - [channel setStreamHandler:OCMOCK_ANY + [channel setStreamHandler:nil completion:^{ [expectation fulfill]; }]; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionUtils.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionUtils.m deleted file mode 100644 index b63a1d684e00..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionUtils.m +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -@import AVFoundation; -#import "./include/camera_avfoundation/CameraPermissionUtils.h" - -void FLTRequestPermission(BOOL forAudio, FLTCameraPermissionRequestCompletionHandler handler) { - AVMediaType mediaType; - if (forAudio) { - mediaType = AVMediaTypeAudio; - } else { - mediaType = AVMediaTypeVideo; - } - - switch ([AVCaptureDevice authorizationStatusForMediaType:mediaType]) { - case AVAuthorizationStatusAuthorized: - handler(nil); - break; - case AVAuthorizationStatusDenied: { - FlutterError *flutterError; - if (forAudio) { - flutterError = - [FlutterError errorWithCode:@"AudioAccessDeniedWithoutPrompt" - message:@"User has previously denied the audio access request. " - @"Go to Settings to enable audio access." - details:nil]; - } else { - flutterError = - [FlutterError errorWithCode:@"CameraAccessDeniedWithoutPrompt" - message:@"User has previously denied the camera access request. " - @"Go to Settings to enable camera access." 
- details:nil]; - } - handler(flutterError); - break; - } - case AVAuthorizationStatusRestricted: { - FlutterError *flutterError; - if (forAudio) { - flutterError = [FlutterError errorWithCode:@"AudioAccessRestricted" - message:@"Audio access is restricted. " - details:nil]; - } else { - flutterError = [FlutterError errorWithCode:@"CameraAccessRestricted" - message:@"Camera access is restricted. " - details:nil]; - } - handler(flutterError); - break; - } - case AVAuthorizationStatusNotDetermined: { - [AVCaptureDevice requestAccessForMediaType:mediaType - completionHandler:^(BOOL granted) { - // handler can be invoked on an arbitrary dispatch queue. - if (granted) { - handler(nil); - } else { - FlutterError *flutterError; - if (forAudio) { - flutterError = [FlutterError - errorWithCode:@"AudioAccessDenied" - message:@"User denied the audio access request." - details:nil]; - } else { - flutterError = [FlutterError - errorWithCode:@"CameraAccessDenied" - message:@"User denied the camera access request." 
- details:nil]; - } - handler(flutterError); - } - }]; - break; - } - } -} - -void FLTRequestCameraPermissionWithCompletionHandler( - FLTCameraPermissionRequestCompletionHandler handler) { - FLTRequestPermission(/*forAudio*/ NO, handler); -} - -void FLTRequestAudioPermissionWithCompletionHandler( - FLTCameraPermissionRequestCompletionHandler handler) { - FLTRequestPermission(/*forAudio*/ YES, handler); -} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index de208fd560ef..558ccd245c14 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -8,10 +8,12 @@ @import AVFoundation; @import Flutter; -#import "./include/camera_avfoundation/CameraPermissionUtils.h" #import "./include/camera_avfoundation/CameraProperties.h" #import "./include/camera_avfoundation/FLTCam.h" +#import "./include/camera_avfoundation/FLTCameraPermissionManager.h" #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" +#import "./include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h" +#import "./include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h" #import "./include/camera_avfoundation/QueueUtils.h" #import "./include/camera_avfoundation/messages.g.h" @@ -25,6 +27,10 @@ @interface CameraPlugin () @property(readonly, nonatomic) id registry; @property(readonly, nonatomic) NSObject *messenger; @property(nonatomic) FCPCameraGlobalEventApi *globalEventAPI; +@property(readonly, nonatomic) FLTCameraPermissionManager *permissionManager; +@property(readonly, nonatomic) id deviceDiscovery; +@property(nonatomic, copy) CaptureSessionFactory captureSessionFactory; +@property(nonatomic, copy) CaptureNamedDeviceFactory captureDeviceFactory; 
@end @implementation CameraPlugin @@ -37,21 +43,40 @@ + (void)registerWithRegistrar:(NSObject *)registrar { - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger { - return - [self initWithRegistry:registry - messenger:messenger - globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger]]; + return [self initWithRegistry:registry + messenger:messenger + globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger] + deviceDiscovery:[[FLTDefaultCameraDeviceDiscovery alloc] init] + sessionFactory:^id(void) { + return [[FLTDefaultCaptureSession alloc] + initWithCaptureSession:[[AVCaptureSession alloc] init]]; + } + deviceFactory:^id(NSString *name) { + AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:name]; + return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; + }]; } - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger - globalAPI:(FCPCameraGlobalEventApi *)globalAPI { + globalAPI:(FCPCameraGlobalEventApi *)globalAPI + deviceDiscovery:(id)deviceDiscovery + sessionFactory:(CaptureSessionFactory)captureSessionFactory + deviceFactory:(CaptureNamedDeviceFactory)deviceFactory { self = [super init]; NSAssert(self, @"super init cannot be nil"); _registry = registry; _messenger = messenger; _globalEventAPI = globalAPI; _captureSessionQueue = dispatch_queue_create("io.flutter.camera.captureSessionQueue", NULL); + _deviceDiscovery = deviceDiscovery; + _captureSessionFactory = captureSessionFactory; + _captureDeviceFactory = deviceFactory; + + id permissionService = [[FLTDefaultPermissionService alloc] init]; + _permissionManager = + [[FLTCameraPermissionManager alloc] initWithPermissionService:permissionService]; + dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); @@ -111,11 +136,12 @@ - (void)availableCamerasWithCompletion: if (@available(iOS 13.0, *)) { 
[discoveryDevices addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera]; } - AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession - discoverySessionWithDeviceTypes:discoveryDevices - mediaType:AVMediaTypeVideo - position:AVCaptureDevicePositionUnspecified]; - NSArray *devices = discoverySession.devices; + + NSArray> *devices = + [self.deviceDiscovery discoverySessionWithDeviceTypes:discoveryDevices + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified]; + NSMutableArray *reply = [[NSMutableArray alloc] initWithCapacity:devices.count]; for (AVCaptureDevice *device in devices) { @@ -145,7 +171,7 @@ - (void)createCameraWithName:(nonnull NSString *)cameraName // Create FLTCam only if granted camera access (and audio access if audio is enabled) __weak typeof(self) weakSelf = self; dispatch_async(self.captureSessionQueue, ^{ - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + [self->_permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { typeof(self) strongSelf = weakSelf; if (!strongSelf) return; @@ -157,25 +183,26 @@ - (void)createCameraWithName:(nonnull NSString *)cameraName // optional, and used as a workaround to fix a missing frame issue on iOS. if (settings.enableAudio) { // Setup audio capture session only if granted audio access. 
- FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { - // cannot use the outter `strongSelf` - typeof(self) strongSelf = weakSelf; - if (!strongSelf) return; - if (error) { - completion(nil, error); - } else { - [strongSelf createCameraOnSessionQueueWithName:cameraName - settings:settings - completion:completion]; - } - }); + [self->_permissionManager + requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { + // cannot use the outer `strongSelf` + typeof(self) strongSelf = weakSelf; + if (!strongSelf) return; + if (error) { + completion(nil, error); + } else { + [strongSelf createCameraOnSessionQueueWithName:cameraName + settings:settings + completion:completion]; + } + }]; } else { [strongSelf createCameraOnSessionQueueWithName:cameraName settings:settings completion:completion]; } } - }); + }]; }); } @@ -471,12 +498,22 @@ - (void)sessionQueueCreateCameraWithName:(NSString *)name [[FLTCamMediaSettingsAVWrapper alloc] init]; NSError *error; - FLTCam *cam = [[FLTCam alloc] initWithCameraName:name - mediaSettings:settings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:[[UIDevice currentDevice] orientation] - captureSessionQueue:self.captureSessionQueue - error:&error]; + + __weak typeof(self) weakSelf = self; + + FLTCamConfiguration *configuration = [[FLTCamConfiguration alloc] initWithMediaSettings:settings + mediaSettingsWrapper:mediaSettingsAVWrapper + captureDeviceFactory:^id _Nonnull { + return weakSelf.captureDeviceFactory(name); + } + captureSessionQueue:self.captureSessionQueue + captureSessionFactory:_captureSessionFactory + audioCaptureDeviceFactory:^id _Nonnull { + return [[FLTDefaultCaptureDeviceController alloc] + initWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]]; + }]; + + FLTCam *cam = [[FLTCam alloc] initWithConfiguration:configuration error:&error]; if (error) { completion(nil, FlutterErrorFromNSError(error)); diff --git
a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 0b065026f10e..086f1e446cff 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -9,8 +9,15 @@ @import Flutter; #import +#import "./include/camera_avfoundation/FLTCamConfiguration.h" #import "./include/camera_avfoundation/FLTSavePhotoDelegate.h" #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureConnection.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" +#import "./include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureSession.h" +#import "./include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" +#import "./include/camera_avfoundation/Protocols/FLTEventChannel.h" #import "./include/camera_avfoundation/QueueUtils.h" #import "./include/camera_avfoundation/messages.g.h" @@ -54,18 +61,18 @@ @interface FLTCam () videoCaptureSession; +@property(readonly, nonatomic) id audioCaptureSession; -@property(readonly, nonatomic) AVCaptureInput *captureVideoInput; +@property(readonly, nonatomic) id captureVideoInput; /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. /// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API. 
@property(readwrite, nonatomic) CVPixelBufferRef latestPixelBuffer; @property(readonly, nonatomic) CGSize captureSize; -@property(strong, nonatomic) AVAssetWriter *videoWriter; -@property(strong, nonatomic) AVAssetWriterInput *videoWriterInput; -@property(strong, nonatomic) AVAssetWriterInput *audioWriterInput; -@property(strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor; +@property(strong, nonatomic) id videoWriter; +@property(strong, nonatomic) id videoWriterInput; +@property(strong, nonatomic) id audioWriterInput; +@property(strong, nonatomic) id assetWriterPixelBufferAdaptor; @property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput; @property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput; @property(strong, nonatomic) NSString *videoRecordingPath; @@ -88,7 +95,7 @@ @interface FLTCam () videoAdaptor; /// All FLTCam's state access and capture session related operations should be on run on this queue. @property(strong, nonatomic) dispatch_queue_t captureSessionQueue; /// The queue on which `latestPixelBuffer` property is accessed. @@ -103,6 +110,11 @@ @interface FLTCam () photoSettingsFactory; +@property(readonly, nonatomic) id deviceOrientationProvider; /// Reports the given error message to the Dart side of the plugin. 
/// @@ -114,50 +126,11 @@ @implementation FLTCam NSString *const errorMethod = @"error"; -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error { - return [self initWithCameraName:cameraName - mediaSettings:mediaSettings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:orientation - videoCaptureSession:[[AVCaptureSession alloc] init] - audioCaptureSession:[[AVCaptureSession alloc] init] - captureSessionQueue:captureSessionQueue - error:error]; -} - -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(AVCaptureSession *)videoCaptureSession - audioCaptureSession:(AVCaptureSession *)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error { - return [self initWithMediaSettings:mediaSettings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:orientation - videoCaptureSession:videoCaptureSession - audioCaptureSession:videoCaptureSession - captureSessionQueue:captureSessionQueue - captureDeviceFactory:^AVCaptureDevice *(void) { - return [AVCaptureDevice deviceWithUniqueID:cameraName]; - } - videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { - return CMVideoFormatDescriptionGetDimensions(format.formatDescription); - } - error:error]; -} - // Returns frame rate supported by format closest to targetFrameRate. 
-static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targetFrameRate) { +static double bestFrameRateForFormat(id format, double targetFrameRate) { double bestFrameRate = 0; double minDistance = DBL_MAX; - for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { + for (id range in format.videoSupportedFrameRateRanges) { double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); double distance = fabs(frameRate - targetFrameRate); if (distance < minDistance) { @@ -174,17 +147,17 @@ static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targe // as activeFormat and also updates mediaSettings.framesPerSecond to value which // bestFrameRateForFormat returned for that format. static void selectBestFormatForRequestedFrameRate( - AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, + id captureDevice, FCPPlatformMediaSettings *mediaSettings, VideoDimensionsForFormat videoDimensionsForFormat) { CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; FourCharCode preferredSubType = CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); - AVCaptureDeviceFormat *bestFormat = captureDevice.activeFormat; + id bestFormat = captureDevice.activeFormat; double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate); double minDistance = fabs(bestFrameRate - targetFrameRate); BOOL isBestSubTypePreferred = YES; - for (AVCaptureDeviceFormat *format in captureDevice.formats) { + for (id format in captureDevice.formats) { CMVideoDimensions resolution = videoDimensionsForFormat(format); if (resolution.width != targetResolution.width || resolution.height != targetResolution.height) { @@ -206,37 +179,34 @@ static void selectBestFormatForRequestedFrameRate( mediaSettings.framesPerSecond = @(bestFrameRate); } -- 
(instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(AVCaptureSession *)videoCaptureSession - audioCaptureSession:(AVCaptureSession *)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory - videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat - error:(NSError **)error { +- (nonnull instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configuration + error:(NSError **)error { self = [super init]; NSAssert(self, @"super init cannot be nil"); - _mediaSettings = mediaSettings; - _mediaSettingsAVWrapper = mediaSettingsAVWrapper; + _mediaSettings = configuration.mediaSettings; + _mediaSettingsAVWrapper = configuration.mediaSettingsWrapper; - _captureSessionQueue = captureSessionQueue; + _captureSessionQueue = configuration.captureSessionQueue; _pixelBufferSynchronizationQueue = dispatch_queue_create("io.flutter.camera.pixelBufferSynchronizationQueue", NULL); _photoIOQueue = dispatch_queue_create("io.flutter.camera.photoIOQueue", NULL); - _videoCaptureSession = videoCaptureSession; - _audioCaptureSession = audioCaptureSession; - _captureDeviceFactory = captureDeviceFactory; - _captureDevice = captureDeviceFactory(); - _videoDimensionsForFormat = videoDimensionsForFormat; + _videoCaptureSession = configuration.videoCaptureSession; + _audioCaptureSession = configuration.audioCaptureSession; + _captureDeviceFactory = configuration.captureDeviceFactory; + _captureDevice = _captureDeviceFactory(); + _audioCaptureDeviceFactory = configuration.audioCaptureDeviceFactory; + _videoDimensionsForFormat = configuration.videoDimensionsForFormat; _flashMode = _captureDevice.hasFlash ? 
FCPPlatformFlashModeAuto : FCPPlatformFlashModeOff; _exposureMode = FCPPlatformExposureModeAuto; _focusMode = FCPPlatformFocusModeAuto; _lockedCaptureOrientation = UIDeviceOrientationUnknown; - _deviceOrientation = orientation; + _deviceOrientation = configuration.orientation; _videoFormat = kCVPixelFormatType_32BGRA; _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary]; _fileFormat = FCPPlatformImageFileFormatJpeg; + _assetWriterFactory = configuration.assetWriterFactory; + _pixelBufferAdaptorFactory = configuration.pixelBufferAdaptorFactory; + _photoSettingsFactory = configuration.photoSettingsFactory; // To limit memory consumption, limit the number of frames pending processing. // After some testing, 4 was determined to be the best maximum value. @@ -244,7 +214,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _maxStreamingPendingFramesCount = 4; NSError *localError = nil; - AVCaptureConnection *connection = [self createConnection:&localError]; + id connection = [self createConnection:&localError]; if (localError) { if (error != nil) { *error = localError; @@ -252,20 +222,22 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return nil; } - [_videoCaptureSession addInputWithNoConnections:_captureVideoInput]; + [_videoCaptureSession addInputWithNoConnections:_captureVideoInput.input]; [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput]; [_videoCaptureSession addConnection:connection]; - _capturePhotoOutput = [AVCapturePhotoOutput new]; + _capturePhotoOutput = configuration.capturePhotoOutput; [_capturePhotoOutput setHighResolutionCaptureEnabled:YES]; - [_videoCaptureSession addOutput:_capturePhotoOutput]; + [_videoCaptureSession addOutput:_capturePhotoOutput.photoOutput]; _motionManager = [[CMMotionManager alloc] init]; [_motionManager startAccelerometerUpdates]; + _deviceOrientationProvider = configuration.deviceOrientationProvider; + if 
(_mediaSettings.framesPerSecond) { // The frame rate can be changed only on a locked for configuration device. - if ([mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) { + if ([_mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) { [_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession]; // Possible values for presets are hard-coded in FLT interface having @@ -286,8 +258,8 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0); CMTime duration = CMTimeMake(10, fpsNominator); - [mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice]; - [mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice]; + [_mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice]; + [_mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice]; [_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession]; [_mediaSettingsAVWrapper unlockDevice:_captureDevice]; @@ -307,9 +279,9 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return self; } -- (AVCaptureConnection *)createConnection:(NSError **)error { +- (id)createConnection:(NSError **)error { // Setup video capture input. - _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:error]; + _captureVideoInput = [_captureDevice createInput:error]; // Test the return value of the `deviceInputWithDevice` method to see whether an error occurred. // Don’t just test to see whether the error pointer was set to point to an error. 
@@ -334,7 +306,7 @@ - (AVCaptureConnection *)createConnection:(NSError **)error { connection.videoMirrored = YES; } - return connection; + return [[FLTDefaultCaptureConnection alloc] initWithConnection:connection]; } - (void)reportInitializationState { @@ -344,8 +316,8 @@ - (void)reportInitializationState { height:self.previewSize.height] exposureMode:self.exposureMode focusMode:self.focusMode - exposurePointSupported:self.captureDevice.exposurePointOfInterestSupported - focusPointSupported:self.captureDevice.focusPointOfInterestSupported]; + exposurePointSupported:self.captureDevice.isExposurePointOfInterestSupported + focusPointSupported:self.captureDevice.isFocusPointOfInterestSupported]; __weak typeof(self) weakSelf = self; FLTEnsureToRunOnMainQueue(^{ @@ -394,7 +366,7 @@ - (void)updateOrientation { ? _lockedCaptureOrientation : _deviceOrientation; - [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput]; + [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput.photoOutput]; [self updateOrientation:orientation forCaptureOutput:_captureVideoOutput]; } @@ -412,7 +384,7 @@ - (void)updateOrientation:(UIDeviceOrientation)orientation - (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable, FlutterError *_Nullable))completion { - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; + id settings = [_photoSettingsFactory createPhotoSettings]; if (self.mediaSettings.resolutionPreset == FCPPlatformResolutionPresetMax) { [settings setHighResolutionPhotoEnabled:YES]; @@ -424,8 +396,8 @@ - (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable, [self.capturePhotoOutput.availablePhotoCodecTypes containsObject:AVVideoCodecTypeHEVC]; if (_fileFormat == FCPPlatformImageFileFormatHeif && isHEVCCodecAvailable) { - settings = - [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]; + settings = [_photoSettingsFactory + createPhotoSettingsWithFormat:@{AVVideoCodecKey : 
AVVideoCodecTypeHEVC}]; extension = @"heif"; } else { extension = @"jpg"; } @@ -522,13 +494,13 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset withError:(NSError **)error { switch (resolutionPreset) { case FCPPlatformResolutionPresetMax: { - AVCaptureDeviceFormat *bestFormat = + id bestFormat = [self highestResolutionFormatForCaptureDevice:_captureDevice]; if (bestFormat) { _videoCaptureSession.sessionPreset = AVCaptureSessionPresetInputPriority; if ([_captureDevice lockForConfiguration:NULL]) { - // Set the best device format found and finish the device configuration. - _captureDevice.activeFormat = bestFormat; + // Set the best device format found and finish the device configuration. + _captureDevice.activeFormat = bestFormat; [_captureDevice unlockForConfiguration]; break; } @@ -587,14 +558,14 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset /// Finds the highest available resolution in terms of pixel count for the given device. /// Preferred are formats with the same subtype as current activeFormat.
-- (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: - (AVCaptureDevice *)captureDevice { +- (id)highestResolutionFormatForCaptureDevice: + (id)captureDevice { FourCharCode preferredSubType = CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription); - AVCaptureDeviceFormat *bestFormat = nil; + id bestFormat = nil; NSUInteger maxPixelCount = 0; BOOL isBestSubTypePreferred = NO; - for (AVCaptureDeviceFormat *format in _captureDevice.formats) { + for (id format in _captureDevice.formats) { CMVideoDimensions res = self.videoDimensionsForFormat(format); NSUInteger height = res.height; NSUInteger width = res.width; @@ -613,7 +584,7 @@ - (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection { + fromConnection:(id)connection { if (output == _captureVideoOutput) { CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CFRetain(newBuffer); @@ -751,7 +722,7 @@ - (void)captureOutput:(AVCaptureOutput *)output CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset); // do not append sample buffer when readyForMoreMediaData is NO to avoid crash // https://github.com/flutter/flutter/issues/132073 - if (_videoWriterInput.readyForMoreMediaData) { + if (_videoWriterInput.isReadyForMoreMediaData) { [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime]; } } else { @@ -810,7 +781,7 @@ - (void)newVideoSample:(CMSampleBufferRef)sampleBuffer { } return; } - if (_videoWriterInput.readyForMoreMediaData) { + if (_videoWriterInput.isReadyForMoreMediaData) { if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) { [self reportErrorMessage:@"Unable to write to video input"]; } @@ -824,7 +795,7 @@ - (void)newAudioSample:(CMSampleBufferRef)sampleBuffer { } return; } - if (_audioWriterInput.readyForMoreMediaData) { + if 
(_audioWriterInput.isReadyForMoreMediaData) { if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) { [self reportErrorMessage:@"Unable to write to audio input"]; } @@ -1038,7 +1009,8 @@ - (void)applyFocusMode { [self applyFocusMode:_focusMode onDevice:_captureDevice]; } -- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice { +- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode + onDevice:(id)captureDevice { [captureDevice lockForConfiguration:nil]; switch (focusMode) { case FCPPlatformFocusModeLocked: @@ -1084,11 +1056,11 @@ - (void)setDescriptionWhileRecording:(NSString *)cameraName // Remove the old video capture connections. [_videoCaptureSession beginConfiguration]; - [_videoCaptureSession removeInput:_captureVideoInput]; + [_videoCaptureSession removeInput:_captureVideoInput.input]; [_videoCaptureSession removeOutput:_captureVideoOutput]; NSError *error = nil; - AVCaptureConnection *newConnection = [self createConnection:&error]; + id newConnection = [self createConnection:&error]; if (error) { completion(FlutterErrorFromNSError(error)); return; @@ -1100,11 +1072,11 @@ - (void)setDescriptionWhileRecording:(NSString *)cameraName } // Add the new connections to the session. 
- if (![_videoCaptureSession canAddInput:_captureVideoInput]) + if (![_videoCaptureSession canAddInput:_captureVideoInput.input]) completion([FlutterError errorWithCode:@"VideoError" message:@"Unable switch video input" details:nil]); - [_videoCaptureSession addInputWithNoConnections:_captureVideoInput]; + [_videoCaptureSession addInputWithNoConnections:_captureVideoInput.input]; if (![_videoCaptureSession canAddOutput:_captureVideoOutput]) completion([FlutterError errorWithCode:@"VideoError" message:@"Unable switch video output" @@ -1175,7 +1147,7 @@ - (void)setFocusPoint:(FCPPlatformPoint *)point details:nil]); return; } - UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + UIDeviceOrientation orientation = [_deviceOrientationProvider orientation]; [_captureDevice lockForConfiguration:nil]; // A nil point resets to the center. [_captureDevice @@ -1206,8 +1178,10 @@ - (void)startImageStreamWithMessenger:(NSObject *)messen FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream" binaryMessenger:messenger]; + id eventChannelProtocol = + [[FLTDefaultEventChannel alloc] initWithEventChannel:eventChannel]; FLTThreadSafeEventChannel *threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel]; + [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannelProtocol]; _imageStreamHandler = imageStreamHandler; __weak typeof(self) weakSelf = self; @@ -1287,9 +1261,8 @@ - (BOOL)setupWriterForPath:(NSString *)path { [self setUpCaptureSessionForAudio]; } - _videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL - fileType:AVFileTypeMPEG4 - error:&error]; + _videoWriter = _assetWriterFactory(outputURL, AVFileTypeMPEG4, &error); + NSParameterAssert(_videoWriter); if (error) { [self reportErrorMessage:error.description]; @@ -1317,11 +1290,8 @@ - (BOOL)setupWriterForPath:(NSString *)path { _videoWriterInput = [_mediaSettingsAVWrapper 
assetWriterVideoInputWithOutputSettings:videoSettings]; - _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor - assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput - sourcePixelBufferAttributes:@{ - (NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat) - }]; + _videoAdaptor = _pixelBufferAdaptorFactory( + _videoWriterInput, @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)}); NSParameterAssert(_videoWriterInput); @@ -1374,17 +1344,16 @@ - (void)setUpCaptureSessionForAudio { NSError *error = nil; // Create a device input with the device and add it to the session. // Setup the audio input. - AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; - AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice - error:&error]; + id audioDevice = _audioCaptureDeviceFactory(); + id audioInput = [audioDevice createInput:&error]; if (error) { [self reportErrorMessage:error.description]; } // Setup the audio output. _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; - if ([_audioCaptureSession canAddInput:audioInput]) { - [_audioCaptureSession addInput:audioInput]; + if ([_audioCaptureSession canAddInput:audioInput.input]) { + [_audioCaptureSession addInput:audioInput.input]; if ([_audioCaptureSession canAddOutput:_audioOutput]) { [_audioCaptureSession addOutput:_audioOutput]; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamConfiguration.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamConfiguration.m new file mode 100644 index 000000000000..a047f71a488c --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamConfiguration.m @@ -0,0 +1,50 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "./include/camera_avfoundation/FLTCamConfiguration.h" + +@implementation FLTCamConfiguration + +- (nonnull instancetype) + initWithMediaSettings:(nonnull FCPPlatformMediaSettings *)mediaSettings + mediaSettingsWrapper:(nonnull FLTCamMediaSettingsAVWrapper *)mediaSettingsWrapper + captureDeviceFactory:(nonnull CaptureDeviceFactory)captureDeviceFactory + captureSessionQueue:(nonnull dispatch_queue_t)captureSessionQueue + captureSessionFactory:(nonnull CaptureSessionFactory)captureSessionFactory + audioCaptureDeviceFactory:(nonnull AudioCaptureDeviceFactory)audioCaptureDeviceFactory { + self = [super init]; + if (self) { + _mediaSettings = mediaSettings; + _mediaSettingsWrapper = mediaSettingsWrapper; + _captureSessionQueue = captureSessionQueue; + _videoCaptureSession = captureSessionFactory(); + _audioCaptureSession = captureSessionFactory(); + _captureDeviceFactory = captureDeviceFactory; + _audioCaptureDeviceFactory = audioCaptureDeviceFactory; + _orientation = [[UIDevice currentDevice] orientation]; + _capturePhotoOutput = + [[FLTDefaultCapturePhotoOutput alloc] initWithPhotoOutput:[AVCapturePhotoOutput new]]; + _deviceOrientationProvider = [[FLTDefaultDeviceOrientationProvider alloc] init]; + _assetWriterFactory = + ^id _Nonnull(NSURL *_Nonnull url, AVFileType _Nonnull fileType, + NSError *_Nullable __autoreleasing *_Nullable error) { + return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + }; + _pixelBufferAdaptorFactory = ^id( + id _Nonnull assetWriterInput, + NSDictionary *_Nullable sourcePixelBufferAttributes) { + return [[FLTDefaultPixelBufferAdaptor alloc] + initWithAdaptor:[[AVAssetWriterInputPixelBufferAdaptor alloc] + initWithAssetWriterInput:assetWriterInput.input + sourcePixelBufferAttributes:sourcePixelBufferAttributes]]; + }; + _photoSettingsFactory = [[FLTDefaultCapturePhotoSettingsFactory alloc] 
init]; + _videoDimensionsForFormat = ^CMVideoDimensions(id _Nonnull format) { + return CMVideoFormatDescriptionGetDimensions(format.formatDescription); + }; + } + return self; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m index b975daa4b5c2..0cc66b724240 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m @@ -3,47 +3,55 @@ // found in the LICENSE file. #import "./include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h" +#import "./include/camera_avfoundation/Protocols/FLTAssetWriter.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureSession.h" @implementation FLTCamMediaSettingsAVWrapper -- (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError *_Nullable *_Nullable)outError { +- (BOOL)lockDevice:(id)captureDevice + error:(NSError *_Nullable *_Nullable)outError { return [captureDevice lockForConfiguration:outError]; } -- (void)unlockDevice:(AVCaptureDevice *)captureDevice { +- (void)unlockDevice:(id)captureDevice { return [captureDevice unlockForConfiguration]; } -- (void)beginConfigurationForSession:(AVCaptureSession *)videoCaptureSession { +- (void)beginConfigurationForSession:(id)videoCaptureSession { [videoCaptureSession beginConfiguration]; } -- (void)commitConfigurationForSession:(AVCaptureSession *)videoCaptureSession { +- (void)commitConfigurationForSession:(id)videoCaptureSession { [videoCaptureSession commitConfiguration]; } -- (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { +- 
(void)setMinFrameDuration:(CMTime)duration + onDevice:(id)captureDevice { captureDevice.activeVideoMinFrameDuration = duration; } -- (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { +- (void)setMaxFrameDuration:(CMTime)duration + onDevice:(id)captureDevice { captureDevice.activeVideoMaxFrameDuration = duration; } -- (AVAssetWriterInput *)assetWriterAudioInputWithOutputSettings: +- (id)assetWriterAudioInputWithOutputSettings: (nullable NSDictionary *)outputSettings { - return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio - outputSettings:outputSettings]; + return [[FLTDefaultAssetWriterInput alloc] + initWithInput:[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio + outputSettings:outputSettings]]; } -- (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: +- (id)assetWriterVideoInputWithOutputSettings: (nullable NSDictionary *)outputSettings { - return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo - outputSettings:outputSettings]; + return [[FLTDefaultAssetWriterInput alloc] + initWithInput:[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo + outputSettings:outputSettings]]; } -- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer { - [writer addInput:writerInput]; +- (void)addInput:(id)writerInput toAssetWriter:(id)writer { + [writer addInput:writerInput.input]; } - (nullable NSDictionary *) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m new file mode 100644 index 000000000000..748338096380 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m @@ -0,0 +1,101 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import AVFoundation; +#import "./include/camera_avfoundation/FLTCameraPermissionManager.h" +#import "./include/camera_avfoundation/Protocols/FLTPermissionService.h" + +@implementation FLTCameraPermissionManager + +- (instancetype)initWithPermissionService:(id)service { + self = [super init]; + if (self) { + _permissionService = service ?: [[FLTDefaultPermissionService alloc] init]; + } + return self; +} + +- (void)requestAudioPermissionWithCompletionHandler: + (__strong FLTCameraPermissionRequestCompletionHandler)handler { + [self requestPermissionForAudio:YES handler:handler]; +} + +- (void)requestCameraPermissionWithCompletionHandler: + (__strong FLTCameraPermissionRequestCompletionHandler)handler { + [self requestPermissionForAudio:NO handler:handler]; +} + +- (void)requestPermissionForAudio:(BOOL)forAudio + handler:(FLTCameraPermissionRequestCompletionHandler)handler { + AVMediaType mediaType; + if (forAudio) { + mediaType = AVMediaTypeAudio; + } else { + mediaType = AVMediaTypeVideo; + } + + switch ([_permissionService authorizationStatusForMediaType:mediaType]) { + case AVAuthorizationStatusAuthorized: + handler(nil); + break; + case AVAuthorizationStatusDenied: { + FlutterError *flutterError; + if (forAudio) { + flutterError = + [FlutterError errorWithCode:@"AudioAccessDeniedWithoutPrompt" + message:@"User has previously denied the audio access request. " + @"Go to Settings to enable audio access." + details:nil]; + } else { + flutterError = + [FlutterError errorWithCode:@"CameraAccessDeniedWithoutPrompt" + message:@"User has previously denied the camera access request. " + @"Go to Settings to enable camera access." 
+ details:nil]; + } + handler(flutterError); + break; + } + case AVAuthorizationStatusRestricted: { + FlutterError *flutterError; + if (forAudio) { + flutterError = [FlutterError errorWithCode:@"AudioAccessRestricted" + message:@"Audio access is restricted. " + details:nil]; + } else { + flutterError = [FlutterError errorWithCode:@"CameraAccessRestricted" + message:@"Camera access is restricted. " + details:nil]; + } + handler(flutterError); + break; + } + case AVAuthorizationStatusNotDetermined: { + [_permissionService requestAccessForMediaType:mediaType + completionHandler:^(BOOL granted) { + // handler can be invoked on an arbitrary dispatch queue. + if (granted) { + handler(nil); + } else { + FlutterError *flutterError; + if (forAudio) { + flutterError = [FlutterError + errorWithCode:@"AudioAccessDenied" + message:@"User denied the audio access request." + details:nil]; + } else { + flutterError = [FlutterError + errorWithCode:@"CameraAccessDenied" + message:@"User denied the camera access request." + details:nil]; + } + handler(flutterError); + } + }]; + break; + } + } +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m index 5b45f93c221c..c574e1abef66 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m @@ -4,6 +4,8 @@ #import "./include/camera_avfoundation/FLTSavePhotoDelegate.h" #import "./include/camera_avfoundation/FLTSavePhotoDelegate_Test.h" +#import "./include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h" +#import "./include/camera_avfoundation/Protocols/FLTPhotoData.h" @interface FLTSavePhotoDelegate () /// The file path for the captured photo. 
@@ -26,7 +28,7 @@ - (instancetype)initWithPath:(NSString *)path } - (void)handlePhotoCaptureResultWithError:(NSError *)error - photoDataProvider:(NSData * (^)(void))photoDataProvider { + photoDataProvider:(id (^)(void))photoDataProvider { if (error) { self.completionHandler(nil, error); return; @@ -36,7 +38,7 @@ - (void)handlePhotoCaptureResultWithError:(NSError *)error typeof(self) strongSelf = weakSelf; if (!strongSelf) return; - NSData *data = photoDataProvider(); + id data = photoDataProvider(); NSError *ioError; if ([data writeToFile:strongSelf.path options:NSDataWritingAtomic error:&ioError]) { strongSelf.completionHandler(self.path, nil); @@ -46,12 +48,13 @@ - (void)handlePhotoCaptureResultWithError:(NSError *)error }); } -- (void)captureOutput:(AVCapturePhotoOutput *)output +- (void)captureOutput:(id)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error { [self handlePhotoCaptureResultWithError:error - photoDataProvider:^NSData * { - return [photo fileDataRepresentation]; + photoDataProvider:^id { + NSData *data = [photo fileDataRepresentation]; + return [[FLTDefaultPhotoData alloc] initWithData:data]; }]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m index 53c7273a5901..515fa75948d2 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m @@ -3,15 +3,16 @@ // found in the LICENSE file. 
#import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" +#import "./include/camera_avfoundation/Protocols/FLTEventChannel.h" #import "./include/camera_avfoundation/QueueUtils.h" @interface FLTThreadSafeEventChannel () -@property(nonatomic, strong) FlutterEventChannel *channel; +@property(nonatomic, strong) id channel; @end @implementation FLTThreadSafeEventChannel -- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel { +- (instancetype)initWithEventChannel:(id)channel { self = [super init]; if (self) { _channel = channel; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m new file mode 100644 index 000000000000..cf16f3f5e71d --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m @@ -0,0 +1,98 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTAssetWriter.h" + +@interface FLTDefaultAssetWriter () +@property(nonatomic, strong) AVAssetWriter *writer; +@end + +@implementation FLTDefaultAssetWriter + +- (instancetype)initWithURL:(NSURL *)url fileType:(AVFileType)fileType error:(NSError **)error { + self = [super init]; + if (self) { + _writer = [[AVAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + } + return self; +} + +- (BOOL)startWriting { + return [self.writer startWriting]; +} + +- (void)finishWritingWithCompletionHandler:(void (^)(void))handler { + [self.writer finishWritingWithCompletionHandler:handler]; +} + +- (AVAssetWriterStatus)status { + return _writer.status; +} + +- (NSError *)error { + return _writer.error; +} + +- (void)startSessionAtSourceTime:(CMTime)startTime { + return [_writer startSessionAtSourceTime:startTime]; +} + +- (void)addInput:(AVAssetWriterInput *)input { + return [_writer addInput:input]; +} + +@end + +@interface FLTDefaultAssetWriterInput () +@property(nonatomic, strong) AVAssetWriterInput *input; +@end + +@implementation FLTDefaultAssetWriterInput + +- (instancetype)initWithInput:(AVAssetWriterInput *)input { + self = [super init]; + if (self) { + _input = input; + } + return self; +} + +- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer { + return [self.input appendSampleBuffer:sampleBuffer]; +} + +- (BOOL)expectsMediaDataInRealTime { + return [self.input expectsMediaDataInRealTime]; +} + +- (void)setExpectsMediaDataInRealTime:(BOOL)expectsMediaDataInRealTime { + self.input.expectsMediaDataInRealTime = expectsMediaDataInRealTime; +} + +- (BOOL)isReadyForMoreMediaData { + return [self.input isReadyForMoreMediaData]; +} + +@end + +@interface FLTDefaultPixelBufferAdaptor () +@property(nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *adaptor; +@end + +@implementation FLTDefaultPixelBufferAdaptor + +- (nonnull instancetype)initWithAdaptor:(nonnull AVAssetWriterInputPixelBufferAdaptor 
*)adaptor { + self = [super init]; + if (self) { + _adaptor = adaptor; + } + return self; +} + +- (BOOL)appendPixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer + withPresentationTime:(CMTime)presentationTime { + return [_adaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m new file mode 100644 index 000000000000..a691c40bf052 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m @@ -0,0 +1,32 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import Flutter; + +#import "FLTCameraDeviceDiscovery.h" + +@implementation FLTDefaultCameraDeviceDiscovery + +- (NSArray> *) + discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position { + AVCaptureDeviceDiscoverySession *discoverySession = + [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes + mediaType:mediaType + position:position]; + + NSArray *devices = discoverySession.devices; + NSMutableArray> *deviceControllers = [NSMutableArray array]; + + for (AVCaptureDevice *device in devices) { + FLTDefaultCaptureDeviceController *controller = + [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; + [deviceControllers addObject:controller]; + } + + return deviceControllers; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureConnection.m 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureConnection.m new file mode 100644 index 000000000000..2eaffd65d13e --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureConnection.m @@ -0,0 +1,49 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "../include/camera_avfoundation/Protocols/FLTCaptureConnection.h" + +@interface FLTDefaultCaptureConnection () +@property(nonatomic, strong) AVCaptureConnection *connection; +@end + +@implementation FLTDefaultCaptureConnection + +- (instancetype)initWithConnection:(AVCaptureConnection *)connection { + self = [super init]; + if (self) { + _connection = connection; + } + return self; +} + +- (BOOL)isVideoMirroringSupported { + return self.connection.isVideoMirroringSupported; +} + +- (BOOL)isVideoOrientationSupported { + return self.connection.isVideoOrientationSupported; +} + +- (void)setVideoMirrored:(BOOL)videoMirrored { + self.connection.videoMirrored = videoMirrored; +} + +- (BOOL)videoMirrored { + return self.connection.videoMirrored; +} + +- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation { + self.connection.videoOrientation = videoOrientation; +} + +- (AVCaptureVideoOrientation)videoOrientation { + return self.connection.videoOrientation; +} + +- (NSArray *)inputPorts { + return self.connection.inputPorts; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m new file mode 100644 index 000000000000..0b932c9fd4a7 --- /dev/null +++ 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m @@ -0,0 +1,251 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "../include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" + +@interface FLTDefaultCaptureDeviceController () +@property(nonatomic, strong) AVCaptureDevice *device; +@end + +@implementation FLTDefaultCaptureDeviceController + +- (instancetype)initWithDevice:(AVCaptureDevice *)device { + self = [super init]; + if (self) { + _device = device; + } + return self; +} + +- (nonnull NSString *)uniqueID { + return self.device.uniqueID; +} + +// Position/Orientation +- (AVCaptureDevicePosition)position { + return self.device.position; +} + +// Format/Configuration +- (id)activeFormat { + return [[FLTDefaultCaptureDeviceFormat alloc] initWithFormat:self.device.activeFormat]; +} + +- (NSArray> *)formats { + NSMutableArray> *wrappedFormats = [NSMutableArray array]; + for (AVCaptureDeviceFormat *format in self.device.formats) { + [wrappedFormats addObject:[[FLTDefaultCaptureDeviceFormat alloc] initWithFormat:format]]; + } + return wrappedFormats; +} + +- (void)setActiveFormat:(id)format { + self.device.activeFormat = format.format; +} + +// Flash/Torch +- (BOOL)hasFlash { + return self.device.hasFlash; +} + +- (BOOL)hasTorch { + return self.device.hasTorch; +} + +- (BOOL)isTorchAvailable { + return self.device.isTorchAvailable; +} + +- (AVCaptureTorchMode)torchMode { + return self.device.torchMode; +} + +- (void)setTorchMode:(AVCaptureTorchMode)torchMode { + self.device.torchMode = torchMode; +} + +- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode { + return [self.device isFlashModeSupported:mode]; +} + +// Focus +- (BOOL)isFocusPointOfInterestSupported { + return self.device.isFocusPointOfInterestSupported; +} + +- 
(BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { + return [self.device isFocusModeSupported:mode]; +} + +- (void)setFocusMode:(AVCaptureFocusMode)focusMode { + self.device.focusMode = focusMode; +} + +- (void)setFocusPointOfInterest:(CGPoint)point { + self.device.focusPointOfInterest = point; +} + +// Exposure +- (BOOL)isExposurePointOfInterestSupported { + return self.device.isExposurePointOfInterestSupported; +} + +- (void)setExposureMode:(AVCaptureExposureMode)exposureMode { + self.device.exposureMode = exposureMode; +} + +- (void)setExposurePointOfInterest:(CGPoint)point { + self.device.exposurePointOfInterest = point; +} + +- (float)minExposureTargetBias { + return self.device.minExposureTargetBias; +} + +- (float)maxExposureTargetBias { + return self.device.maxExposureTargetBias; +} + +- (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime))handler { + [self.device setExposureTargetBias:bias completionHandler:handler]; +} + +- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { + return [self.device isExposureModeSupported:mode]; +} + +// Zoom +- (float)maxAvailableVideoZoomFactor { + return self.device.maxAvailableVideoZoomFactor; +} + +- (float)minAvailableVideoZoomFactor { + return self.device.minAvailableVideoZoomFactor; +} + +- (float)videoZoomFactor { + return self.device.videoZoomFactor; +} + +- (void)setVideoZoomFactor:(float)factor { + self.device.videoZoomFactor = factor; +} + +// Camera Properties +- (float)lensAperture { + return self.device.lensAperture; +} + +- (CMTime)exposureDuration { + return self.device.exposureDuration; +} + +- (float)ISO { + return self.device.ISO; +} + +// Configuration Lock +- (BOOL)lockForConfiguration:(NSError **)error { + return [self.device lockForConfiguration:error]; +} + +- (void)unlockForConfiguration { + [self.device unlockForConfiguration]; +} + +- (CMTime)activeVideoMinFrameDuration { + return self.device.activeVideoMinFrameDuration; +} + +- 
(void)setActiveVideoMinFrameDuration:(CMTime)duration { + self.device.activeVideoMinFrameDuration = duration; +} + +- (CMTime)activeVideoMaxFrameDuration { + return self.device.activeVideoMaxFrameDuration; +} + +- (void)setActiveVideoMaxFrameDuration:(CMTime)duration { + self.device.activeVideoMaxFrameDuration = duration; +} + +- (id)createInput:(NSError *_Nullable *_Nullable)error { + return [[FLTDefaultCaptureInput alloc] + initWithInput:[AVCaptureDeviceInput deviceInputWithDevice:_device error:error]]; +} + +@end + +@interface FLTDefaultCaptureDeviceFormat () +@property(nonatomic, strong) AVCaptureDeviceFormat *format; +@end + +@implementation FLTDefaultCaptureDeviceFormat + +- (instancetype)initWithFormat:(AVCaptureDeviceFormat *)format { + self = [super init]; + if (self) { + _format = format; + } + return self; +} + +- (CMFormatDescriptionRef)formatDescription { + return _format.formatDescription; +} + +- (NSArray> *)videoSupportedFrameRateRanges { + NSMutableArray> *ranges = [NSMutableArray array]; + for (AVFrameRateRange *range in _format.videoSupportedFrameRateRanges) { + FLTDefaultFrameRateRange *wrapper = [[FLTDefaultFrameRateRange alloc] initWithRange:range]; + [ranges addObject:wrapper]; + } + return ranges; +} + +@end + +@interface FLTDefaultFrameRateRange () +@property(nonatomic, strong) AVFrameRateRange *range; +@end + +@implementation FLTDefaultFrameRateRange + +- (instancetype)initWithRange:(AVFrameRateRange *)range { + self = [super init]; + if (self) { + _range = range; + } + return self; +} + +- (float)minFrameRate { + return self.range.minFrameRate; +} + +- (float)maxFrameRate { + return self.range.maxFrameRate; +} + +@end + +@interface FLTDefaultCaptureInput () +@property(nonatomic, strong) AVCaptureInput *input; +@end + +@implementation FLTDefaultCaptureInput + +- (instancetype)initWithInput:(AVCaptureInput *)input { + self = [super init]; + if (self) { + _input = input; + } + return self; +} + +- (NSArray *)ports { + return 
self.input.ports; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m new file mode 100644 index 000000000000..b26e9d4c9515 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m @@ -0,0 +1,63 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "../include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h" +#import "../include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" + +@implementation FLTDefaultCapturePhotoOutput { + AVCapturePhotoOutput *_photoOutput; +} + +- (instancetype)initWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput { + self = [super init]; + if (self) { + _photoOutput = photoOutput; + } + return self; +} + +- (AVCapturePhotoOutput *)photoOutput { + return _photoOutput; +} + +- (NSArray *)availablePhotoCodecTypes { + return _photoOutput.availablePhotoCodecTypes; +} + +- (void)setHighResolutionCaptureEnabled:(BOOL)enabled { + [_photoOutput setHighResolutionCaptureEnabled:enabled]; +} + +- (BOOL)isHighResolutionCaptureEnabled { + return _photoOutput.isHighResolutionCaptureEnabled; +} + +- (void)capturePhotoWithSettings:(id)settings + delegate:(id)delegate { + [_photoOutput capturePhotoWithSettings:settings.settings delegate:delegate]; +} + +- (nullable AVCaptureConnection *)connectionWithMediaType:(nonnull AVMediaType)mediaType { + return [_photoOutput connectionWithMediaType:mediaType]; +} + +- (NSArray *)supportedFlashModes { + return _photoOutput.supportedFlashModes; +} + +- (void)forwardInvocation:(NSInvocation *)invocation { + NSLog(@"Selector being called: %@", NSStringFromSelector([invocation 
selector])); + if ([_photoOutput respondsToSelector:[invocation selector]]) { + [invocation invokeWithTarget:_photoOutput]; + } else { + [super forwardInvocation:invocation]; + } +} + +- (BOOL)respondsToSelector:(SEL)aSelector { + NSLog(@"Checking selector: %@", NSStringFromSelector(aSelector)); + return [super respondsToSelector:aSelector] || [_photoOutput respondsToSelector:aSelector]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m new file mode 100644 index 000000000000..8ca88eb6fa15 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m @@ -0,0 +1,51 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" + +@interface FLTDefaultCapturePhotoSettings () +@property(nonatomic, strong) AVCapturePhotoSettings *settings; +@end + +@implementation FLTDefaultCapturePhotoSettings +- (instancetype)initWithSettings:(AVCapturePhotoSettings *)settings { + self = [super init]; + if (self) { + _settings = settings; + } + return self; +} + +- (int64_t)uniqueID { + return _settings.uniqueID; +} + +- (NSDictionary *)format { + return _settings.format; +} + +- (void)setFlashMode:(AVCaptureFlashMode)flashMode { + [_settings setFlashMode:flashMode]; +} + +- (void)setHighResolutionPhotoEnabled:(BOOL)enabled { + [_settings setHighResolutionPhotoEnabled:enabled]; +} + +@end + +@implementation FLTDefaultCapturePhotoSettingsFactory + +- (id)createPhotoSettings { + return [[FLTDefaultCapturePhotoSettings alloc] + initWithSettings:[AVCapturePhotoSettings photoSettings]]; +} + +- (id)createPhotoSettingsWithFormat: + (NSDictionary *)format { + return [[FLTDefaultCapturePhotoSettings alloc] + initWithSettings:[AVCapturePhotoSettings photoSettingsWithFormat:format]]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSession.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSession.m new file mode 100644 index 000000000000..241fe223658d --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSession.m @@ -0,0 +1,98 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTCaptureSession.h" +#import "../include/camera_avfoundation/Protocols/FLTCaptureConnection.h" + +@interface FLTDefaultCaptureSession () +@property(nonatomic, strong) AVCaptureSession *captureSession; +@end + +@implementation FLTDefaultCaptureSession + +- (instancetype)initWithCaptureSession:(AVCaptureSession *)session { + self = [super init]; + if (self) { + _captureSession = session; + } + return self; +} + +- (void)beginConfiguration { + [_captureSession beginConfiguration]; +} + +- (void)commitConfiguration { + [_captureSession commitConfiguration]; +} + +- (void)startRunning { + [_captureSession startRunning]; +} + +- (void)stopRunning { + [_captureSession stopRunning]; +} + +- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { + return [_captureSession canSetSessionPreset:preset]; +} + +- (void)addInputWithNoConnections:(AVCaptureInput *)input { + [_captureSession addInputWithNoConnections:input]; +} + +- (void)addOutputWithNoConnections:(AVCaptureOutput *)output { + [_captureSession addOutputWithNoConnections:output]; +} + +- (void)addConnection:(id)connection { + [_captureSession addConnection:connection.connection]; +} + +- (void)addOutput:(AVCaptureOutput *)output { + [_captureSession addOutput:output]; +} + +- (void)removeInput:(AVCaptureInput *)input { + [_captureSession removeInput:input]; +} + +- (void)removeOutput:(AVCaptureOutput *)output { + [_captureSession removeOutput:output]; +} + +- (void)setSessionPreset:(AVCaptureSessionPreset)sessionPreset { + _captureSession.sessionPreset = sessionPreset; +} + +- (AVCaptureSessionPreset)sessionPreset { + return _captureSession.sessionPreset; +} + +- (NSArray *)inputs { + return _captureSession.inputs; +} + +- (NSArray *)outputs { + return _captureSession.outputs; +} + +- (BOOL)canAddInput:(AVCaptureInput *)input { + return [_captureSession canAddInput:input]; +} + +- (BOOL)canAddOutput:(AVCaptureOutput *)output { + return [_captureSession 
canAddOutput:output]; +} + +- (BOOL)canAddConnection:(id)connection { + return [_captureSession canAddConnection:connection.connection]; +} + +- (void)addInput:(AVCaptureInput *)input { + [_captureSession addInput:input]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m new file mode 100644 index 000000000000..0f909b2a9fa6 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m @@ -0,0 +1,13 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "../include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" + +@implementation FLTDefaultDeviceOrientationProvider + +- (UIDeviceOrientation)orientation { + return [[UIDevice currentDevice] orientation]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannel.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannel.m new file mode 100644 index 000000000000..c3eb17a6dc2e --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannel.m @@ -0,0 +1,27 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import Flutter; + +#import "FLTEventChannel.h" + +@interface FLTDefaultEventChannel () +@property(nonatomic, strong) FlutterEventChannel *channel; +@end + +@implementation FLTDefaultEventChannel + +- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel { + self = [super init]; + if (self) { + _channel = channel; + } + return self; +} + +- (void)setStreamHandler:(NSObject *)handler { + [self.channel setStreamHandler:handler]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m new file mode 100644 index 000000000000..458540c2b9fe --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m @@ -0,0 +1,16 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTPermissionService.h" + +@implementation FLTDefaultPermissionService +- (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType { + return [AVCaptureDevice authorizationStatusForMediaType:mediaType]; +} + +- (void)requestAccessForMediaType:(AVMediaType)mediaType + completionHandler:(void (^)(BOOL granted))handler { + [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:handler]; +} +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m new file mode 100644 index 000000000000..b2b9dae0ffb6 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m @@ -0,0 +1,22 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTPhotoData.h" + +@implementation FLTDefaultPhotoData + +- (instancetype)initWithData:(NSData *)data { + self = [super init]; + if (self) { + _data = data; + } + return self; +} + +- (BOOL)writeToFile:(NSString *)path + options:(NSDataWritingOptions)writeOptionsMask + error:(NSError **)errorPtr { + return [self.data writeToFile:path options:writeOptionsMask error:errorPtr]; +} +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index bc864d174927..18f45ce1be3e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -6,12 +6,24 @@ framework module camera_avfoundation { explicit module Test { header "CameraPlugin_Test.h" - header "CameraPermissionUtils.h" header "CameraProperties.h" header "FLTCam.h" header "FLTCam_Test.h" + header "FLTCamConfiguration.h" header "FLTSavePhotoDelegate_Test.h" header "FLTThreadSafeEventChannel.h" + header "FLTPermissionService.h" + header "FLTCaptureDeviceControlling.h" + header "FLTCameraPermissionManager.h" + header "FLTDeviceOrientationProviding.h" + header "FLTEventChannel.h" + header "FLTCameraDeviceDiscovery.h" + header "FLTCaptureSession.h" + header "FLTCapturePhotoSettings.h" + header "FLTCapturePhotoOutput.h" + header "FLTPhotoData.h" + header "FLTAssetWriter.h" + header "FLTCaptureConnection.h" header "QueueUtils.h" } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h index 586b2fc87085..0ccff032d1ca 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h @@ -6,5 +6,10 @@ #import "messages.g.h" +#import "FLTCaptureDeviceControlling.h" +#import "FLTCaptureSession.h" + +typedef id (^CaptureNamedDeviceFactory)(NSString *name); + @interface CameraPlugin : NSObject @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h index c29c2f306db8..68029369dc7b 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h @@ -6,6 +6,7 @@ #import "CameraPlugin.h" #import "FLTCam.h" +#import "FLTCameraDeviceDiscovery.h" #import "messages.g.h" /// APIs exposed for unit testing. @@ -25,7 +26,10 @@ /// unit testing. - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger - globalAPI:(FCPCameraGlobalEventApi *)globalAPI NS_DESIGNATED_INITIALIZER; + globalAPI:(FCPCameraGlobalEventApi *)globalAPI + deviceDiscovery:(id)deviceDiscovery + sessionFactory:(CaptureSessionFactory)sessionFactory + deviceFactory:(CaptureNamedDeviceFactory)deviceFactory NS_DESIGNATED_INITIALIZER; /// Hide the default public constructor. 
- (instancetype)init NS_UNAVAILABLE; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h index d8f97926b770..831e760faa9a 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h @@ -7,7 +7,12 @@ @import Flutter; #import "CameraProperties.h" +#import "FLTAssetWriter.h" +#import "FLTCamConfiguration.h" #import "FLTCamMediaSettingsAVWrapper.h" +#import "FLTCaptureDeviceControlling.h" +#import "FLTCapturePhotoOutput.h" +#import "FLTDeviceOrientationProviding.h" #import "messages.g.h" NS_ASSUME_NONNULL_BEGIN @@ -15,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN /// A class that manages camera's state and performs camera operations. @interface FLTCam : NSObject -@property(readonly, nonatomic) AVCaptureDevice *captureDevice; +@property(readonly, nonatomic) id captureDevice; @property(readonly, nonatomic) CGSize previewSize; @property(assign, nonatomic) BOOL isPreviewPaused; @property(nonatomic, copy) void (^onFrameAvailable)(void); @@ -32,19 +37,10 @@ NS_ASSUME_NONNULL_BEGIN @property(assign, nonatomic) CGFloat maximumAvailableZoomFactor; /// Initializes an `FLTCam` instance. -/// @param cameraName a name used to uniquely identify the camera. -/// @param mediaSettings the media settings configuration parameters -/// @param mediaSettingsAVWrapper AVFoundation wrapper to perform media settings related operations -/// (for dependency injection in unit tests). -/// @param orientation the orientation of camera -/// @param captureSessionQueue the queue on which camera's capture session operations happen. 
+/// Allows for testing with specified resolution, audio preference, orientation, +/// and direct access to capture sessions and blocks. /// @param error report to the caller if any error happened creating the camera. -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error; +- (instancetype)initWithConfiguration:(FLTCamConfiguration *)configuration error:(NSError **)error; /// Informs the Dart side of the plugin of the current camera state and capabilities. - (void)reportInitializationState; @@ -92,7 +88,8 @@ NS_ASSUME_NONNULL_BEGIN /// /// @param focusMode The focus mode that should be applied to the @captureDevice instance. /// @param captureDevice The AVCaptureDevice to which the @focusMode will be applied. -- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice; +- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode + onDevice:(id)captureDevice; - (void)pausePreview; - (void)resumePreview; - (void)setDescriptionWhileRecording:(NSString *)cameraName diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamConfiguration.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamConfiguration.h new file mode 100644 index 000000000000..9b59b9b5e715 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamConfiguration.h @@ -0,0 +1,62 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import AVFoundation; +@import Foundation; +@import Flutter; + +#import "CameraProperties.h" +#import "FLTAssetWriter.h" +#import "FLTCamMediaSettingsAVWrapper.h" +#import "FLTCaptureDeviceControlling.h" +#import "FLTCapturePhotoOutput.h" +#import "FLTDeviceOrientationProviding.h" + +NS_ASSUME_NONNULL_BEGIN + +/// Factory block returning an AVCaptureDevice. +/// Used in tests to inject a device into FLTCam. +typedef id _Nonnull (^CaptureDeviceFactory)(void); + +typedef id _Nonnull (^AudioCaptureDeviceFactory)(void); + +typedef id _Nonnull (^AssetWriterFactory)(NSURL *, AVFileType, + NSError *_Nullable *_Nullable); + +typedef id _Nonnull (^PixelBufferAdaptorFactory)( + id, NSDictionary *_Nullable); + +typedef id _Nonnull (^CaptureSessionFactory)(void); + +/// Determines the video dimensions (width and height) for a given capture device format. +/// Used in tests to mock CMVideoFormatDescriptionGetDimensions. +typedef CMVideoDimensions (^VideoDimensionsForFormat)(id); + +@interface FLTCamConfiguration : NSObject + +- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings + mediaSettingsWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsWrapper + captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory + captureSessionQueue:(dispatch_queue_t)captureSessionQueue + captureSessionFactory:(CaptureSessionFactory)captureSessionFactory + audioCaptureDeviceFactory:(AudioCaptureDeviceFactory)audioCaptureDeviceFactory; + +@property(nonatomic, strong) id deviceOrientationProvider; +@property(nonatomic, strong) id videoCaptureSession; +@property(nonatomic, strong) id audioCaptureSession; +@property(nonatomic, strong) dispatch_queue_t captureSessionQueue; +@property(nonatomic, strong) FCPPlatformMediaSettings *mediaSettings; +@property(nonatomic, strong) FLTCamMediaSettingsAVWrapper *mediaSettingsWrapper; +@property(nonatomic, strong) id capturePhotoOutput; +@property(nonatomic, copy) AssetWriterFactory assetWriterFactory; +@property(nonatomic, 
copy) PixelBufferAdaptorFactory pixelBufferAdaptorFactory; +@property(nonatomic, strong) id photoSettingsFactory; +@property(nonatomic, copy) CaptureDeviceFactory captureDeviceFactory; +@property(nonatomic, copy) CaptureDeviceFactory audioCaptureDeviceFactory; +@property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat; +@property(nonatomic, assign) UIDeviceOrientation orientation; + +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h index 144a84eac13f..767ab63e5bab 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h @@ -5,6 +5,10 @@ @import AVFoundation; @import Foundation; +#import "FLTAssetWriter.h" +#import "FLTCaptureDeviceControlling.h" +#import "FLTCaptureSession.h" + NS_ASSUME_NONNULL_BEGIN /** @@ -25,14 +29,15 @@ NS_ASSUME_NONNULL_BEGIN * @param outError The optional error. * @result A BOOL indicating whether the device was successfully locked for configuration. */ -- (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError *_Nullable *_Nullable)outError; +- (BOOL)lockDevice:(id)captureDevice + error:(NSError *_Nullable *_Nullable)outError; /** * @method unlockDevice: * @abstract Release exclusive control over device hardware properties. * @param captureDevice The capture device. 
*/ -- (void)unlockDevice:(AVCaptureDevice *)captureDevice; +- (void)unlockDevice:(id)captureDevice; /** * @method beginConfigurationForSession: @@ -40,7 +45,7 @@ NS_ASSUME_NONNULL_BEGIN * operations on a running session into atomic updates. * @param videoCaptureSession The video capture session. */ -- (void)beginConfigurationForSession:(AVCaptureSession *)videoCaptureSession; +- (void)beginConfigurationForSession:(id)videoCaptureSession; /** * @method commitConfigurationForSession: @@ -48,7 +53,7 @@ NS_ASSUME_NONNULL_BEGIN * operations on a running session into atomic updates. * @param videoCaptureSession The video capture session. */ -- (void)commitConfigurationForSession:(AVCaptureSession *)videoCaptureSession; +- (void)commitConfigurationForSession:(id)videoCaptureSession; /** * @method setMinFrameDuration:onDevice: @@ -57,7 +62,8 @@ NS_ASSUME_NONNULL_BEGIN * @param duration The frame duration. * @param captureDevice The capture device */ -- (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice; +- (void)setMinFrameDuration:(CMTime)duration + onDevice:(id)captureDevice; /** * @method setMaxFrameDuration:onDevice: @@ -66,7 +72,8 @@ NS_ASSUME_NONNULL_BEGIN * @param duration The frame duration. * @param captureDevice The capture device */ -- (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice; +- (void)setMaxFrameDuration:(CMTime)duration + onDevice:(id)captureDevice; /** * @method assetWriterAudioInputWithOutputSettings: @@ -75,7 +82,7 @@ NS_ASSUME_NONNULL_BEGIN * @param outputSettings The settings used for encoding the audio appended to the output. * @result An instance of `AVAssetWriterInput`. */ -- (AVAssetWriterInput *)assetWriterAudioInputWithOutputSettings: +- (id)assetWriterAudioInputWithOutputSettings: (nullable NSDictionary *)outputSettings; /** @@ -85,7 +92,7 @@ NS_ASSUME_NONNULL_BEGIN * @param outputSettings The settings used for encoding the video appended to the output. 
* @result An instance of `AVAssetWriterInput`. */ -- (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: +- (id)assetWriterVideoInputWithOutputSettings: (nullable NSDictionary *)outputSettings; /** @@ -94,7 +101,7 @@ NS_ASSUME_NONNULL_BEGIN * @param writerInput The `AVAssetWriterInput` object to be added. * @param writer The `AVAssetWriter` object. */ -- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer; +- (void)addInput:(id)writerInput toAssetWriter:(AVAssetWriter *)writer; /** * @method recommendedVideoSettingsForAssetWriterWithFileType:forOutput: diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h index d05838f49a70..39ae6f49cdf8 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h @@ -3,17 +3,14 @@ // found in the LICENSE file. #import "FLTCam.h" +#import "FLTCaptureConnection.h" +#import "FLTCaptureDeviceControlling.h" +#import "FLTCapturePhotoOutput.h" +#import "FLTCaptureSession.h" #import "FLTSavePhotoDelegate.h" -/// Determines the video dimensions (width and height) for a given capture device format. -/// Used in tests to mock CMVideoFormatDescriptionGetDimensions. -typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *); - -/// Factory block returning an AVCaptureDevice. -/// Used in tests to inject a device into FLTCam. 
-typedef AVCaptureDevice * (^CaptureDeviceFactory)(void); - @interface FLTImageStreamHandler : NSObject +- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue; /// The queue on which `eventSink` property should be accessed. @property(nonatomic, strong) dispatch_queue_t captureSessionQueue; @@ -33,7 +30,7 @@ typedef AVCaptureDevice * (^CaptureDeviceFactory)(void); @property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput; /// The output for photo capturing. Exposed setter for unit tests. -@property(strong, nonatomic) AVCapturePhotoOutput *capturePhotoOutput; +@property(strong, nonatomic) id capturePhotoOutput; /// True when images from the camera are being streamed. @property(assign, nonatomic) BOOL isStreamingImages; @@ -48,33 +45,9 @@ typedef AVCaptureDevice * (^CaptureDeviceFactory)(void); /// Delegate callback when receiving a new video or audio sample. /// Exposed for unit tests. -- (void)captureOutput:(AVCaptureOutput *)output +- (void)captureOutput:(AVCaptureVideoDataOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection; - -/// Initializes a camera instance. -/// Allows for injecting dependencies that are usually internal. -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(AVCaptureSession *)videoCaptureSession - audioCaptureSession:(AVCaptureSession *)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error; - -/// Initializes a camera instance. -/// Allows for testing with specified resolution, audio preference, orientation, -/// and direct access to capture sessions and blocks. 
-- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(AVCaptureSession *)videoCaptureSession - audioCaptureSession:(AVCaptureSession *)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory - videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat - error:(NSError **)error; + fromConnection:(id)connection; /// Start streaming images. - (void)startImageStreamWithMessenger:(NSObject *)messenger diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPermissionUtils.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h similarity index 71% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPermissionUtils.h rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h index 5cbbab055f34..6bd24bae12fe 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPermissionUtils.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h @@ -5,7 +5,16 @@ @import Foundation; #import -typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *); +#import "FLTPermissionService.h" + +NS_ASSUME_NONNULL_BEGIN + +typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *_Nullable); + +@interface FLTCameraPermissionManager : NSObject +@property(nonatomic, 
strong) id permissionService; + +- (instancetype)initWithPermissionService:(id)service; /// Requests camera access permission. /// @@ -16,8 +25,8 @@ typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *); /// @param handler if access permission is (or was previously) granted, completion handler will be /// called without error; Otherwise completion handler will be called with error. Handler can be /// called on an arbitrary dispatch queue. -extern void FLTRequestCameraPermissionWithCompletionHandler( - FLTCameraPermissionRequestCompletionHandler handler); +- (void)requestCameraPermissionWithCompletionHandler: + (FLTCameraPermissionRequestCompletionHandler)handler; /// Requests audio access permission. /// @@ -28,5 +37,9 @@ extern void FLTRequestCameraPermissionWithCompletionHandler( /// @param handler if access permission is (or was previously) granted, completion handler will be /// called without error; Otherwise completion handler will be called with error. Handler can be /// called on an arbitrary dispatch queue. 
-extern void FLTRequestAudioPermissionWithCompletionHandler( - FLTCameraPermissionRequestCompletionHandler handler); +- (void)requestAudioPermissionWithCompletionHandler: + (FLTCameraPermissionRequestCompletionHandler)handler; + +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h index 79539e4bd40e..ac72be4c2c4b 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#import "FLTPhotoData.h" #import "FLTSavePhotoDelegate.h" /// API exposed for unit tests. @@ -20,5 +21,5 @@ /// @param error the capture error. /// @param photoDataProvider a closure that provides photo data. 
- (void)handlePhotoCaptureResultWithError:(NSError *)error - photoDataProvider:(NSData * (^)(void))photoDataProvider; + photoDataProvider:(id (^)(void))photoDataProvider; @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h index 20a1d4023a31..b942b687dc6d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h @@ -4,6 +4,8 @@ #import +#import "FLTEventChannel.h" + NS_ASSUME_NONNULL_BEGIN /// A thread safe wrapper for FlutterEventChannel that can be called from any thread, by dispatching @@ -12,7 +14,7 @@ NS_ASSUME_NONNULL_BEGIN /// Creates a FLTThreadSafeEventChannel by wrapping a FlutterEventChannel object. /// @param channel The FlutterEventChannel object to be wrapped. -- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel; +- (instancetype)initWithEventChannel:(id)channel; /// Registers a handler on the main thread for stream setup requests from the Flutter side. /// The completion block runs on the main thread. 
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h new file mode 100644 index 000000000000..1a9dffb117ab --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h @@ -0,0 +1,43 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import Foundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTAssetWriter +@property(nonatomic, readonly) AVAssetWriterStatus status; +@property(readonly, nullable) NSError *error; +- (BOOL)startWriting; +- (void)finishWritingWithCompletionHandler:(void (^)(void))handler; +- (void)startSessionAtSourceTime:(CMTime)startTime; +- (void)addInput:(AVAssetWriterInput *)input; +@end + +@protocol FLTAssetWriterInput +@property(nonatomic, readonly) AVAssetWriterInput *input; +@property(nonatomic, assign) BOOL expectsMediaDataInRealTime; +@property(nonatomic, readonly) BOOL isReadyForMoreMediaData; +- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer; +@end + +@protocol FLTPixelBufferAdaptor +- (BOOL)appendPixelBuffer:(CVPixelBufferRef)pixelBuffer + withPresentationTime:(CMTime)presentationTime; +@end + +@interface FLTDefaultAssetWriter : NSObject +- (instancetype)initWithURL:(NSURL *)url fileType:(AVFileType)fileType error:(NSError **)error; +@end + +@interface FLTDefaultAssetWriterInput : NSObject +- (instancetype)initWithInput:(AVAssetWriterInput *)input; +@end + +@interface FLTDefaultPixelBufferAdaptor : NSObject +- (instancetype)initWithAdaptor:(AVAssetWriterInputPixelBufferAdaptor *)adaptor; +@end + +NS_ASSUME_NONNULL_END diff --git 
a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h new file mode 100644 index 000000000000..cd4e8e92b096 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h @@ -0,0 +1,22 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import AVFoundation; + +#import "FLTCaptureDeviceControlling.h" +#import "messages.g.h" + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCameraDeviceDiscovery +- (NSArray> *) + discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position; +@end + +@interface FLTDefaultCameraDeviceDiscovery : NSObject +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h new file mode 100644 index 000000000000..e814745eb57a --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h @@ -0,0 +1,22 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCaptureConnection +@property(nonatomic, readonly) AVCaptureConnection *connection; +@property(nonatomic) BOOL videoMirrored; +@property(nonatomic) AVCaptureVideoOrientation videoOrientation; +@property(nonatomic, readonly) NSArray *inputPorts; +@property(nonatomic, readonly) BOOL isVideoMirroringSupported; +@property(nonatomic, readonly) BOOL isVideoOrientationSupported; +@end + +@interface FLTDefaultCaptureConnection : NSObject +- (instancetype)initWithConnection:(AVCaptureConnection *)connection; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h new file mode 100644 index 000000000000..c73ec017eb33 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h @@ -0,0 +1,102 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import AVFoundation; +@import Foundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTFrameRateRange +@property(readonly, nonatomic) float minFrameRate; +@property(readonly, nonatomic) float maxFrameRate; +@end + +@protocol FLTCaptureDeviceFormat +@property(nonatomic, readonly) AVCaptureDeviceFormat *format; +@property(nonatomic, readonly) CMFormatDescriptionRef formatDescription; +@property(nonatomic, readonly) NSArray> *videoSupportedFrameRateRanges; +@end + +@protocol FLTCaptureInput +@property(nonatomic, readonly) AVCaptureInput *input; +@property(nonatomic, readonly) NSArray *ports; +@end + +@protocol FLTCaptureDeviceControlling + +- (NSString *)uniqueID; + +// Position/Orientation +- (AVCaptureDevicePosition)position; + +// Format/Configuration +- (id)activeFormat; +- (NSArray> *)formats; +- (void)setActiveFormat:(id)format; + +// Flash/Torch +- (BOOL)hasFlash; +- (BOOL)hasTorch; +- (BOOL)isTorchAvailable; +- (AVCaptureTorchMode)torchMode; +- (void)setTorchMode:(AVCaptureTorchMode)torchMode; +- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode; + +// Focus +- (BOOL)isFocusPointOfInterestSupported; +- (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode; +- (void)setFocusMode:(AVCaptureFocusMode)focusMode; +- (void)setFocusPointOfInterest:(CGPoint)point; + +// Exposure +- (BOOL)isExposurePointOfInterestSupported; +- (void)setExposureMode:(AVCaptureExposureMode)exposureMode; +- (void)setExposurePointOfInterest:(CGPoint)point; +- (float)minExposureTargetBias; +- (float)maxExposureTargetBias; +- (void)setExposureTargetBias:(float)bias completionHandler:(void (^_Nullable)(CMTime))handler; +- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode; + +// Zoom +- (float)maxAvailableVideoZoomFactor; +- (float)minAvailableVideoZoomFactor; +- (float)videoZoomFactor; +- (void)setVideoZoomFactor:(float)factor; + +// Camera Properties +- (float)lensAperture; +- (CMTime)exposureDuration; +- (float)ISO; + +// Configuration Lock +- (BOOL)lockForConfiguration:(NSError 
**)error; +- (void)unlockForConfiguration; + +// Frame Duration +- (CMTime)activeVideoMinFrameDuration; +- (void)setActiveVideoMinFrameDuration:(CMTime)duration; +- (CMTime)activeVideoMaxFrameDuration; +- (void)setActiveVideoMaxFrameDuration:(CMTime)duration; + +- (id)createInput:(NSError *_Nullable *_Nullable)error; + +@end + +@interface FLTDefaultCaptureDeviceController : NSObject +- (instancetype)initWithDevice:(AVCaptureDevice *)device; +@end + +@interface FLTDefaultCaptureDeviceFormat : NSObject +- (instancetype)initWithFormat:(AVCaptureDeviceFormat *)format; +@end + +@interface FLTDefaultFrameRateRange : NSObject +- (instancetype)initWithRange:(AVFrameRateRange *)range; +@end + +@interface FLTDefaultCaptureInput : NSObject +- (instancetype)initWithInput:(AVCaptureInput *)input; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h new file mode 100644 index 000000000000..1a4beb19c726 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h @@ -0,0 +1,29 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import Foundation; +@import AVFoundation; + +#import "FLTCapturePhotoOutput.h" +#import "FLTCapturePhotoSettings.h" + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCapturePhotoOutput +@property(nonatomic, readonly) AVCapturePhotoOutput *photoOutput; +@property(nonatomic, readonly) NSArray *availablePhotoCodecTypes; +@property(nonatomic, assign, getter=isHighResolutionCaptureEnabled) + BOOL highResolutionCaptureEnabled; +@property(nonatomic, readonly) NSArray *supportedFlashModes; + +- (void)capturePhotoWithSettings:(id)settings + delegate:(id)delegate; +- (nullable AVCaptureConnection *)connectionWithMediaType:(AVMediaType)mediaType; +@end + +@interface FLTDefaultCapturePhotoOutput : NSObject +- (instancetype)initWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h new file mode 100644 index 000000000000..1ed4397320e6 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h @@ -0,0 +1,31 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import Foundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCapturePhotoSettings +@property(nonatomic, readonly) AVCapturePhotoSettings *settings; +@property(readonly, nonatomic) int64_t uniqueID; +@property(nonatomic, copy, readonly) NSDictionary *format; + +- (void)setFlashMode:(AVCaptureFlashMode)flashMode; +- (void)setHighResolutionPhotoEnabled:(BOOL)enabled; +@end + +@protocol FLTCapturePhotoSettingsFactory +- (id)createPhotoSettings; +- (id)createPhotoSettingsWithFormat:(NSDictionary *)format; +@end + +@interface FLTDefaultCapturePhotoSettings : NSObject +- (instancetype)initWithSettings:(AVCapturePhotoSettings *)settings; +@end + +@interface FLTDefaultCapturePhotoSettingsFactory : NSObject +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSession.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSession.h new file mode 100644 index 000000000000..9428b57d414f --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSession.h @@ -0,0 +1,39 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import AVFoundation; + +#import "FLTCaptureDeviceControlling.h" + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCaptureSession + +@property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; +@property(nonatomic, readonly) NSArray *inputs; +@property(nonatomic, readonly) NSArray *outputs; + +- (void)beginConfiguration; +- (void)commitConfiguration; +- (void)startRunning; +- (void)stopRunning; +- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset; +- (void)addInputWithNoConnections:(AVCaptureInput *)input; +- (void)addOutputWithNoConnections:(AVCaptureOutput *)output; +- (void)addConnection:(AVCaptureConnection *)connection; +- (void)addOutput:(AVCaptureOutput *)output; +- (void)removeInput:(AVCaptureInput *)input; +- (void)removeOutput:(AVCaptureOutput *)output; +- (BOOL)canAddInput:(AVCaptureInput *)input; +- (BOOL)canAddOutput:(AVCaptureOutput *)output; +- (BOOL)canAddConnection:(AVCaptureConnection *)connection; +- (void)addInput:(AVCaptureInput *)input; + +@end + +@interface FLTDefaultCaptureSession : NSObject +- (instancetype)initWithCaptureSession:(AVCaptureSession *)session; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h new file mode 100644 index 000000000000..e0b39f9b9e21 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h @@ -0,0 +1,17 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+@import AVFoundation;
+@import Foundation;
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol FLTDeviceOrientationProviding <NSObject>
+- (UIDeviceOrientation)orientation;
+@end
+
+@interface FLTDefaultDeviceOrientationProvider : NSObject <FLTDeviceOrientationProviding>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannel.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannel.h
new file mode 100644
index 000000000000..fa509c04dea6
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannel.h
@@ -0,0 +1,18 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import Flutter;
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol FLTEventChannel <NSObject>
+- (void)setStreamHandler:(nullable NSObject<FlutterStreamHandler> *)handler;
+@end
+
+/// The default event channel that wraps FlutterEventChannel
+@interface FLTDefaultEventChannel : NSObject <FLTEventChannel>
+- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h
new file mode 100644
index 000000000000..7ab7cdfbbd8c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h
@@ -0,0 +1,19 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import AVFoundation;
+@import Foundation;
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol FLTPermissionService <NSObject>
+- (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType;
+- (void)requestAccessForMediaType:(AVMediaType)mediaType
+                completionHandler:(void (^)(BOOL granted))handler;
+@end
+
+@interface FLTDefaultPermissionService : NSObject <FLTPermissionService>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h
new file mode 100644
index 000000000000..099b7b5c6067
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h
@@ -0,0 +1,20 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import Foundation;
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol FLTPhotoData <NSObject>
+- (BOOL)writeToFile:(NSString *)path
+            options:(NSDataWritingOptions)writeOptionsMask
+              error:(NSError **)errorPtr;
+@end
+
+@interface FLTDefaultPhotoData : NSObject <FLTPhotoData>
+@property(nonatomic, strong, readonly) NSData *data;
+- (instancetype)initWithData:(NSData *)data;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/QueueUtils.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/QueueUtils.h
index a7e22da716d0..e230a53508fa 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/QueueUtils.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/QueueUtils.h
@@ -7,7 +7,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /// Queue-specific context data to be associated with the capture session queue.
-extern const char* FLTCaptureSessionQueueSpecific;
+extern const char *FLTCaptureSessionQueueSpecific;
 
 /// Ensures the given block to be run on the main queue.
 /// If caller site is already on the main queue, the block will be run