From c3f703e9106df51e1e1fc32c3be971ecf8fbe61e Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 24 Oct 2018 12:55:50 -0700 Subject: [PATCH 01/34] Start of Android side of byte stream passing --- .../flutter/plugins/camera/CameraPlugin.java | 144 +++++++++++++++++- 1 file changed, 136 insertions(+), 8 deletions(-) diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index 7cd827b3cf04..32a25cea179d 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -201,6 +201,24 @@ public void onMethodCall(MethodCall call, final Result result) { camera.stopVideoRecording(result); break; } + case "startByteStream": + { + try { + camera.startPreviewWithByteStream(); + result.success(null); + } catch (CameraAccessException e) { + result.error("CameraAccess", e.getMessage(), null); + } + } + case "stopByteStream": + { + try { + camera.startPreview(); + result.success(null); + } catch (CameraAccessException e) { + result.error("CameraAccess", e.getMessage(), null); + } + } case "dispose": { if (camera != null) { @@ -246,7 +264,8 @@ private class Camera { private CameraDevice cameraDevice; private CameraCaptureSession cameraCaptureSession; private EventChannel.EventSink eventSink; - private ImageReader imageReader; + private ImageReader pictureImageReader; + private ImageReader byteImageReader; // Used to pass bytes to dart side. private int sensorOrientation; private boolean isFrontFacing; private String cameraName; @@ -432,9 +451,13 @@ private void open(@Nullable final Result result) { if (result != null) result.error("cameraPermission", "Camera permission not granted", null); } else { try { - imageReader = + pictureImageReader = ImageReader.newInstance( captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 2); + byteImageReader = + ImageReader.newInstance( + previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2); + cameraManager.openCamera( cameraName, new CameraDevice.StateCallback() { @@ -527,7 +550,7 @@ private void takePicture(String filePath, @NonNull final Result result) { return; } - imageReader.setOnImageAvailableListener( + pictureImageReader.setOnImageAvailableListener( new ImageReader.OnImageAvailableListener() { @Override public void onImageAvailable(ImageReader reader) { @@ -545,7 +568,7 @@ public void onImageAvailable(ImageReader reader) { try { final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); - captureBuilder.addTarget(imageReader.getSurface()); + captureBuilder.addTarget(pictureImageReader.getSurface()); int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation(); int displayOrientation = ORIENTATIONS.get(displayRotation); if (isFrontFacing) displayOrientation = -displayOrientation; @@ -675,7 +698,7 @@ private void startPreview() throws CameraAccessException { surfaces.add(previewSurface); captureRequestBuilder.addTarget(previewSurface); - surfaces.add(imageReader.getSurface()); + surfaces.add(pictureImageReader.getSurface()); cameraDevice.createCaptureSession( surfaces, @@ -705,6 +728,107 @@ public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession null); } + private void startPreviewWithByteStream() throws CameraAccessException { + closeCaptureSession(); + + SurfaceTexture 
surfaceTexture = textureEntry.surfaceTexture(); + surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); + + captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); + + List surfaces = new ArrayList<>(); + + Surface previewSurface = new Surface(surfaceTexture); + surfaces.add(previewSurface); + captureRequestBuilder.addTarget(previewSurface); + + surfaces.add(byteImageReader.getSurface()); + captureRequestBuilder.addTarget(byteImageReader.getSurface()); + + cameraDevice.createCaptureSession( + surfaces, + new CameraCaptureSession.StateCallback() { + @Override + public void onConfigured(@NonNull CameraCaptureSession session) { + if (cameraDevice == null) { + sendErrorEvent("The camera was closed during configuration."); + return; + } + try { + cameraCaptureSession = session; + captureRequestBuilder.set( + CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO); + cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, null); + } catch (CameraAccessException e) { + sendErrorEvent(e.getMessage()); + } + } + @Override + public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { + sendErrorEvent("Failed to configure the camera for streaming bytes."); + } + }, + null); + + registerByteStreamEventChannel(); + } + + private void registerByteStreamEventChannel() { + final EventChannel cameraChannel = + new EventChannel(registrar.messenger(), "plugins.flutter.io/camera/bytes"); + + cameraChannel.setStreamHandler(new EventChannel.StreamHandler() { + @Override + public void onListen(Object o, EventChannel.EventSink eventSink) { + setByteStreamImageAvailableListener(eventSink); + } + + @Override + public void onCancel(Object o) { + byteImageReader.setOnImageAvailableListener(null, null); + } + }); + } + + private void setByteStreamImageAvailableListener(final EventChannel.EventSink eventSink) { + byteImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() { + @Override + public void onImageAvailable(final ImageReader reader) { + new Runnable() { + @Override + public void run() { + Image img = reader.acquireLatestImage(); + if (img == null) return; + + eventSink.success(YUV_420_888toNV21(img)); + img.close(); + } + }; + } + }, null); + } + + private byte[] YUV_420_888toNV21(Image image) { + byte[] nv21; + + ByteBuffer yBuffer = image.getPlanes()[0].getBuffer(); + ByteBuffer uBuffer = image.getPlanes()[1].getBuffer(); + ByteBuffer vBuffer = image.getPlanes()[2].getBuffer(); + + int ySize = yBuffer.remaining(); + int uSize = uBuffer.remaining(); + int vSize = vBuffer.remaining(); + + nv21 = new byte[ySize + uSize + vSize]; + + //U and V are swapped + yBuffer.get(nv21, 0, ySize); + vBuffer.get(nv21, ySize, vSize); + uBuffer.get(nv21, ySize + vSize, uSize); + + return nv21; + } + private void sendErrorEvent(String errorDescription) { if (eventSink != null) { Map event = new HashMap<>(); @@ -728,9 +852,13 @@ private void close() { cameraDevice.close(); cameraDevice = null; } - if (imageReader != null) { - imageReader.close(); - imageReader = null; + if (pictureImageReader != null) { + pictureImageReader.close(); + pictureImageReader = null; + } + if (byteImageReader != null) { + byteImageReader.close(); + byteImageReader = null; } if (mediaRecorder != null) { mediaRecorder.reset(); From f310580a326b1397e08d1bc1be69f8ef5544cb7c Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 25 Oct 2018 11:08:39 -0700 Subject: [PATCH 02/34] dart side of byte 
streaming --- packages/camera/lib/camera.dart | 86 ++++++++++++++++++++++++++++++++- 1 file changed, 84 insertions(+), 2 deletions(-) diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index 153bbeb69b98..2025d0d96dc7 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -1,4 +1,5 @@ import 'dart:async'; +import 'dart:typed_data'; import 'package:flutter/services.dart'; import 'package:flutter/widgets.dart'; @@ -10,6 +11,8 @@ enum CameraLensDirection { front, back, external } enum ResolutionPreset { low, medium, high } +typedef void OnLatestImageAvailable(Uint8List bytes); + /// Returns the resolution preset as a String. String serializeResolutionPreset(ResolutionPreset resolutionPreset) { switch (resolutionPreset) { @@ -110,13 +113,15 @@ class CameraValue { this.previewSize, this.isRecordingVideo, this.isTakingPicture, + this.isStreamingBytes, }); const CameraValue.uninitialized() : this( isInitialized: false, isRecordingVideo: false, - isTakingPicture: false); + isTakingPicture: false, + isStreamingBytes: false); /// True after [CameraController.initialize] has completed successfully. final bool isInitialized; @@ -127,6 +132,9 @@ class CameraValue { /// True when the camera is recording (not the same as previewing). final bool isRecordingVideo; + /// True when bytes from the camera are being streamed. + final bool isStreamingBytes; + final String errorDescription; /// The size of the preview in pixels. @@ -145,6 +153,7 @@ class CameraValue { bool isInitialized, bool isRecordingVideo, bool isTakingPicture, + bool isStreamingBytes, String errorDescription, Size previewSize, }) { @@ -154,6 +163,7 @@ class CameraValue { previewSize: previewSize ?? this.previewSize, isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo, isTakingPicture: isTakingPicture ?? this.isTakingPicture, + isStreamingBytes: isStreamingBytes ?? this.isStreamingBytes, ); } @@ -164,7 +174,8 @@ class CameraValue { 'isRecordingVideo: $isRecordingVideo, ' 'isInitialized: $isInitialized, ' 'errorDescription: $errorDescription, ' - 'previewSize: $previewSize)'; + 'previewSize: $previewSize, ' + 'isStreamingBytes: $isStreamingBytes)'; } } @@ -185,6 +196,7 @@ class CameraController extends ValueNotifier { int _textureId; bool _isDisposed = false; StreamSubscription _eventSubscription; + StreamSubscription _byteStreamSubscription; Completer _creatingCompleter; /// Initializes the camera on the device. 
@@ -276,6 +288,69 @@ class CameraController extends ValueNotifier { } } + Future startByteStream(OnLatestImageAvailable onAvailable) async { + if (!value.isInitialized || _isDisposed) { + throw CameraException( + 'Uninitialized CameraController', + 'startByteStream was called on uninitialized CameraController.', + ); + } + if (value.isRecordingVideo) { + throw CameraException( + 'A video recording is already started.', + 'startByteStream was called while a video is being recorded.', + ); + } + if (value.isStreamingBytes) { + throw CameraException( + 'A camera has started streaming bytes.', + 'startByteStream was called while a camera was streaming bytes.', + ); + } + + try { + await _channel.invokeMethod('startByteStream'); + value = value.copyWith(isStreamingBytes: true); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + const EventChannel cameraEventChannel = + EventChannel('plugins.flutter.io/camera/bytes'); + _byteStreamSubscription = + cameraEventChannel.receiveBroadcastStream().listen(onAvailable); + } + + Future stopByteStream() async { + if (!value.isInitialized || _isDisposed) { + throw CameraException( + 'Uninitialized CameraController', + 'stopByteStream was called on uninitialized CameraController.', + ); + } + if (value.isRecordingVideo) { + throw CameraException( + 'A video recording is already started.', + 'stopByteStream was called while a video is being recorded.', + ); + } + if (!value.isStreamingBytes) { + throw CameraException( + 'No camera is streaming bytes', + 'stopByteStream was called when no camera is streaming bytes.', + ); + } + + try { + value = value.copyWith(isStreamingBytes: false); + await _channel.invokeMethod('stopByteStream'); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + + _byteStreamSubscription.cancel(); + _byteStreamSubscription = null; + } + /// Start a video recording and save the file to [path]. 
/// /// A path can for example be obtained using @@ -299,6 +374,13 @@ class CameraController extends ValueNotifier { 'startVideoRecording was called when a recording is already started.', ); } + if (value.isStreamingBytes) { + throw CameraException( + 'A camera has started streaming bytes.', + 'startVideoRecording was called while a camera was streaming bytes.', + ); + } + try { await _channel.invokeMethod( 'startVideoRecording', From 87fdb602aa19e2ec02190cb72c4584847d5f29f9 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 25 Oct 2018 12:18:55 -0700 Subject: [PATCH 03/34] Fix android streaming --- .../io/flutter/plugins/camera/CameraPlugin.java | 15 ++++++--------- packages/camera/lib/camera.dart | 6 ++++-- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index 32a25cea179d..ba2f7449f97e 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -209,6 +209,7 @@ public void onMethodCall(MethodCall call, final Result result) { } catch (CameraAccessException e) { result.error("CameraAccess", e.getMessage(), null); } + break; } case "stopByteStream": { @@ -218,6 +219,7 @@ public void onMethodCall(MethodCall call, final Result result) { } catch (CameraAccessException e) { result.error("CameraAccess", e.getMessage(), null); } + break; } case "dispose": { @@ -794,16 +796,11 @@ private void setByteStreamImageAvailableListener(final EventChannel.EventSink ev byteImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() { @Override public void onImageAvailable(final ImageReader reader) { - new Runnable() { - @Override - public void run() { - Image img = reader.acquireLatestImage(); - if (img == null) return; + Image img = reader.acquireLatestImage(); + if (img == null) return; - eventSink.success(YUV_420_888toNV21(img)); - img.close(); - } - }; + eventSink.success(YUV_420_888toNV21(img)); + img.close(); } }, null); } diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index 2025d0d96dc7..e8c6538e61d5 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -196,7 +196,7 @@ class CameraController extends ValueNotifier { int _textureId; bool _isDisposed = false; StreamSubscription _eventSubscription; - StreamSubscription _byteStreamSubscription; + StreamSubscription _byteStreamSubscription; Completer _creatingCompleter; /// Initializes the camera on the device. 
@@ -317,7 +317,9 @@ class CameraController extends ValueNotifier { const EventChannel cameraEventChannel = EventChannel('plugins.flutter.io/camera/bytes'); _byteStreamSubscription = - cameraEventChannel.receiveBroadcastStream().listen(onAvailable); + cameraEventChannel.receiveBroadcastStream().listen((dynamic bytes) { + onAvailable(bytes); + }); } Future stopByteStream() async { From 8d353adb3032aad84e1d98b46e34a35a860c336b Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 14 Nov 2018 16:30:21 -0800 Subject: [PATCH 04/34] Add ios byte streaming --- .../ios/Runner.xcodeproj/project.pbxproj | 24 ++----- packages/camera/ios/Classes/CameraPlugin.m | 67 +++++++++++++++++++ 2 files changed, 71 insertions(+), 20 deletions(-) diff --git a/packages/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/example/ios/Runner.xcodeproj/project.pbxproj index 5a54057fee45..f3ac434ae2e7 100644 --- a/packages/camera/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/example/ios/Runner.xcodeproj/project.pbxproj @@ -161,7 +161,6 @@ 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, FE224661708E6DA2A0F8B952 /* [CP] Embed Pods Frameworks */, - EACF0929FF12B6CC70C2D6BE /* [CP] Copy Pods Resources */, ); buildRules = ( ); @@ -183,7 +182,7 @@ TargetAttributes = { 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; - DevelopmentTeam = EQHXZ8M8AV; + DevelopmentTeam = S8QB4VV633; }; }; }; @@ -269,21 +268,6 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; - EACF0929FF12B6CC70C2D6BE /* [CP] Copy Pods Resources */ = { - isa = PBXShellScriptBuildPhase; - buildActionMask = 2147483647; - files = ( - ); - inputPaths = ( - ); - name = "[CP] Copy Pods Resources"; - outputPaths = ( - ); - runOnlyForDeploymentPostprocessing = 0; - shellPath = /bin/sh; - shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n"; - showEnvVarsInLog = 0; - }; FE224661708E6DA2A0F8B952 /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; @@ -291,7 +275,7 @@ ); inputPaths = ( "${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh", - "${PODS_ROOT}/../../../../../../flutter/bin/cache/artifacts/engine/ios-release/Flutter.framework", + "${PODS_ROOT}/../.symlinks/flutter/ios/Flutter.framework", ); name = "[CP] Embed Pods Frameworks"; outputPaths = ( @@ -433,7 +417,7 @@ buildSettings = { ARCHS = arm64; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = EQHXZ8M8AV; + DEVELOPMENT_TEAM = S8QB4VV633; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -456,7 +440,7 @@ buildSettings = { ARCHS = arm64; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = EQHXZ8M8AV; + DEVELOPMENT_TEAM = S8QB4VV633; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index 0bfb7515c36c..0a9778ffb05b 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -21,6 +21,21 @@ @interface FLTSavePhotoDelegate : NSObject - initWithPath:(NSString *)filename result:(FlutterResult)result; @end +@interface FLTByteStreamHandler : NSObject +@property(readonly, nonatomic) FlutterEventSink eventSink; +@end + +@implementation FLTByteStreamHandler {} +- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments { + 
return nil; +} + +- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments eventSink:(nonnull FlutterEventSink)events { + _eventSink = events; + return nil; +} +@end + @implementation FLTSavePhotoDelegate { /// Used to keep the delegate alive until didFinishProcessingPhotoSampleBuffer. FLTSavePhotoDelegate *selfReference; @@ -64,6 +79,7 @@ @interface FLTCam : NSObject *)messenger { + if (!_isStreamingBytes) { + FlutterEventChannel *eventChannel = [FlutterEventChannel + eventChannelWithName:@"plugins.flutter.io/camera/bytes" + binaryMessenger:messenger]; + + _byteStreamHandler = [[FLTByteStreamHandler alloc] init]; + [eventChannel setStreamHandler:_byteStreamHandler]; + + _isStreamingBytes = YES; + } else { + _eventSink(@{@"event" : @"error", @"errorDescription" : @"Bytes from camera are already streaming!"}); + } +} + +- (void)stopByteStream { + if (_isStreamingBytes) { + _isStreamingBytes = NO; + _byteStreamHandler = nil; + } else { + _eventSink(@{@"event" : @"error", @"errorDescription" : @"Bytes from camera are not streaming!"}); + } +} + - (BOOL)setupWriterForPath:(NSString *)path { NSError *error = nil; NSURL *outputURL; @@ -492,6 +553,12 @@ - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result }); [cam start]; } + } else if ([@"startByteStream" isEqualToString:call.method]) { + [_camera startByteStreamWithMessenger:_messenger]; + result(nil); + } else if ([@"stopByteStream" isEqualToString:call.method]) { + [_camera stopByteStream]; + result(nil); } else { NSDictionary *argsMap = call.arguments; NSUInteger textureId = ((NSNumber *)argsMap[@"textureId"]).unsignedIntegerValue; From 0291a29ca134cb1620edbca5116c53a7c14abbc6 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 15 Nov 2018 20:15:12 -0800 Subject: [PATCH 05/34] Convert buffer to uiimage to pass over --- packages/camera/ios/Classes/CameraPlugin.m | 34 ++++++++++++++++++---- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index 0a9778ffb05b..b3864b28b31e 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -205,19 +205,43 @@ - (void)captureOutput:(AVCaptureOutput *)output if (_isStreamingBytes) { if (!_byteStreamHandler.eventSink) return; - CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - CVPixelBufferLockBaseAddress(pixelBuffer, 0); + // Get a CMSampleBuffer's Core Video image buffer for the media data + CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + // Lock the base address of the pixel buffer + CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + + // Get the number of bytes per row for the pixel buffer + void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer); + + // Get the number of bytes per row for the pixel buffer size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); + // Get the pixel buffer width and height + size_t width = CVPixelBufferGetWidth(pixelBuffer); size_t height = CVPixelBufferGetHeight(pixelBuffer); - void *src_buff = CVPixelBufferGetBaseAddress(pixelBuffer); + // Create a device-dependent RGB color space + CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); + + // Create a bitmap graphics context with the sample buffer data + CGBitmapInfo bitmapInfo = + (kCGBitmapAlphaInfoMask & kCGImageAlphaPremultipliedFirst) | kCGBitmapByteOrder32Little; + + CGContextRef context = CGBitmapContextCreate(baseAddress, 
width, height, 8, bytesPerRow, colorSpace, bitmapInfo); - NSData *data = [NSData dataWithBytes:src_buff length:bytesPerRow * height]; + // Create a Quartz image from the pixel data in the bitmap graphics context + CGImageRef quartzImage = CGBitmapContextCreateImage(context); + + // Unlock the pixel buffer + CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + + // Create an image object from the Quartz image + NSData *data = UIImageJPEGRepresentation([[UIImage alloc] initWithCGImage:quartzImage], 1); FlutterStandardTypedData *eventData = [FlutterStandardTypedData typedDataWithBytes:data]; - CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); _byteStreamHandler.eventSink(eventData); + + CGImageRelease(quartzImage); } if (_isRecording) { if (_videoWriter.status == AVAssetWriterStatusFailed) { From 31f746aa477d6e35ae066fe34e30fc6a7b676b6e Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Sun, 18 Nov 2018 15:55:11 -0800 Subject: [PATCH 06/34] formatting --- .../flutter/plugins/camera/CameraPlugin.java | 47 ++++++++++--------- packages/camera/ios/Classes/CameraPlugin.m | 24 ++++++---- 2 files changed, 40 insertions(+), 31 deletions(-) diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index d9fef42a9b93..77f0068db92f 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -757,7 +757,8 @@ private void startPreviewWithByteStream() throws CameraAccessException { SurfaceTexture surfaceTexture = textureEntry.surfaceTexture(); surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); - captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); + captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); List surfaces = new ArrayList<>(); @@ -786,6 +787,7 @@ public void onConfigured(@NonNull CameraCaptureSession session) { sendErrorEvent(e.getMessage()); } } + @Override public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { sendErrorEvent("Failed to configure the camera for streaming bytes."); @@ -800,30 +802,33 @@ private void registerByteStreamEventChannel() { final EventChannel cameraChannel = new EventChannel(registrar.messenger(), "plugins.flutter.io/camera/bytes"); - cameraChannel.setStreamHandler(new EventChannel.StreamHandler() { - @Override - public void onListen(Object o, EventChannel.EventSink eventSink) { - setByteStreamImageAvailableListener(eventSink); - } + cameraChannel.setStreamHandler( + new EventChannel.StreamHandler() { + @Override + public void onListen(Object o, EventChannel.EventSink eventSink) { + setByteStreamImageAvailableListener(eventSink); + } - @Override - public void onCancel(Object o) { - byteImageReader.setOnImageAvailableListener(null, null); - } - }); + @Override + public void onCancel(Object o) { + byteImageReader.setOnImageAvailableListener(null, null); + } + }); } private void setByteStreamImageAvailableListener(final EventChannel.EventSink eventSink) { - byteImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() { - @Override - public void onImageAvailable(final ImageReader reader) { - Image img = reader.acquireLatestImage(); - if (img == null) return; - - eventSink.success(YUV_420_888toNV21(img)); - img.close(); - } - }, null); 
+ byteImageReader.setOnImageAvailableListener( + new ImageReader.OnImageAvailableListener() { + @Override + public void onImageAvailable(final ImageReader reader) { + Image img = reader.acquireLatestImage(); + if (img == null) return; + + eventSink.success(YUV_420_888toNV21(img)); + img.close(); + } + }, + null); } private byte[] YUV_420_888toNV21(Image image) { diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index b3864b28b31e..c17a25ef9ae1 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -25,12 +25,14 @@ @interface FLTByteStreamHandler : NSObject @property(readonly, nonatomic) FlutterEventSink eventSink; @end -@implementation FLTByteStreamHandler {} +@implementation FLTByteStreamHandler { +} - (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments { return nil; } -- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments eventSink:(nonnull FlutterEventSink)events { +- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)events { _eventSink = events; return nil; } @@ -210,7 +212,6 @@ - (void)captureOutput:(AVCaptureOutput *)output // Lock the base address of the pixel buffer CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - // Get the number of bytes per row for the pixel buffer void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer); @@ -225,9 +226,10 @@ - (void)captureOutput:(AVCaptureOutput *)output // Create a bitmap graphics context with the sample buffer data CGBitmapInfo bitmapInfo = - (kCGBitmapAlphaInfoMask & kCGImageAlphaPremultipliedFirst) | kCGBitmapByteOrder32Little; + (kCGBitmapAlphaInfoMask & kCGImageAlphaPremultipliedFirst) | kCGBitmapByteOrder32Little; - CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo); + CGContextRef context = + CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo); // Create a Quartz image from the pixel data in the bitmap graphics context CGImageRef quartzImage = CGBitmapContextCreateImage(context); @@ -382,16 +384,17 @@ - (void)stopVideoRecordingWithResult:(FlutterResult)result { - (void)startByteStreamWithMessenger:(NSObject *)messenger { if (!_isStreamingBytes) { - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:@"plugins.flutter.io/camera/bytes" - binaryMessenger:messenger]; + FlutterEventChannel *eventChannel = + [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/bytes" + binaryMessenger:messenger]; _byteStreamHandler = [[FLTByteStreamHandler alloc] init]; [eventChannel setStreamHandler:_byteStreamHandler]; _isStreamingBytes = YES; } else { - _eventSink(@{@"event" : @"error", @"errorDescription" : @"Bytes from camera are already streaming!"}); + _eventSink( + @{@"event" : @"error", @"errorDescription" : @"Bytes from camera are already streaming!"}); } } @@ -400,7 +403,8 @@ - (void)stopByteStream { _isStreamingBytes = NO; _byteStreamHandler = nil; } else { - _eventSink(@{@"event" : @"error", @"errorDescription" : @"Bytes from camera are not streaming!"}); + _eventSink( + @{@"event" : @"error", @"errorDescription" : @"Bytes from camera are not streaming!"}); } } From 5fcfeb202d90f1a8fe7793a74f4d1ebcbb871cab Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 28 Nov 2018 13:02:30 -0800 Subject: [PATCH 07/34] Stream yuv bytes instead --- 
packages/camera/ios/Classes/CameraPlugin.m | 32 +++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index c17a25ef9ae1..d69825db3ee7 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -144,7 +144,7 @@ - (instancetype)initWithCameraName:(NSString *)cameraName _captureVideoOutput = [AVCaptureVideoDataOutput new]; _captureVideoOutput.videoSettings = - @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }; + @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) }; [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES]; [_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()]; @@ -209,6 +209,35 @@ - (void)captureOutput:(AVCaptureOutput *)output // Get a CMSampleBuffer's Core Video image buffer for the media data CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + + size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); + NSLog(@"%zu", planeCount); + + NSMutableData *mutableData = [NSMutableData data]; + + for (int i = 0; i < planeCount; i++) { + void *planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); + size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); + size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); + size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); + + unsigned long length = bytesPerRow * height; + [mutableData appendBytes:planeAddress length:[NSNumber numberWithUnsignedLong:length].unsignedIntegerValue]; + + NSLog(@"%zu", bytesPerRow); + NSLog(@"%zu", height); + NSLog(@"%zu", width); + } + + FlutterStandardTypedData *eventData = [FlutterStandardTypedData typedDataWithBytes:mutableData]; + _byteStreamHandler.eventSink(eventData); + + CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + + //NSData *data0 = [NSData dataWithBytes:planeAddress0 length:0]; + + /* // Lock the base address of the pixel buffer CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); @@ -244,6 +273,7 @@ - (void)captureOutput:(AVCaptureOutput *)output _byteStreamHandler.eventSink(eventData); CGImageRelease(quartzImage); + */ } if (_isRecording) { if (_videoWriter.status == AVAssetWriterStatusFailed) { From 040d1aee5b47277d840cc30e6fbc698b61ce611b Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 29 Nov 2018 13:15:17 -0800 Subject: [PATCH 08/34] Make video format a constant --- packages/camera/ios/Classes/CameraPlugin.m | 51 +++------------------- 1 file changed, 5 insertions(+), 46 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index d69825db3ee7..f6d2c3ee4d1e 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -103,16 +103,19 @@ @interface FLTCam : NSObject *)messenger; - (void)stopByteStream; - (void)captureToFile:(NSString *)filename result:(FlutterResult)result; @end @implementation FLTCam +FourCharCode const videoFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; + - (instancetype)initWithCameraName:(NSString *)cameraName resolutionPreset:(NSString *)resolutionPreset error:(NSError **)error { @@ -144,7 +147,7 @@ - (instancetype)initWithCameraName:(NSString *)cameraName _captureVideoOutput = 
[AVCaptureVideoDataOutput new]; _captureVideoOutput.videoSettings = - @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) }; + @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat) }; [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES]; [_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()]; @@ -224,56 +227,12 @@ - (void)captureOutput:(AVCaptureOutput *)output unsigned long length = bytesPerRow * height; [mutableData appendBytes:planeAddress length:[NSNumber numberWithUnsignedLong:length].unsignedIntegerValue]; - - NSLog(@"%zu", bytesPerRow); - NSLog(@"%zu", height); - NSLog(@"%zu", width); } FlutterStandardTypedData *eventData = [FlutterStandardTypedData typedDataWithBytes:mutableData]; _byteStreamHandler.eventSink(eventData); CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - - //NSData *data0 = [NSData dataWithBytes:planeAddress0 length:0]; - - /* - // Lock the base address of the pixel buffer - CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - - // Get the number of bytes per row for the pixel buffer - void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer); - - // Get the number of bytes per row for the pixel buffer - size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); - // Get the pixel buffer width and height - size_t width = CVPixelBufferGetWidth(pixelBuffer); - size_t height = CVPixelBufferGetHeight(pixelBuffer); - - // Create a device-dependent RGB color space - CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); - - // Create a bitmap graphics context with the sample buffer data - CGBitmapInfo bitmapInfo = - (kCGBitmapAlphaInfoMask & kCGImageAlphaPremultipliedFirst) | kCGBitmapByteOrder32Little; - - CGContextRef context = - CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo); - - // Create a Quartz image from the pixel data in the bitmap graphics context - CGImageRef quartzImage = CGBitmapContextCreateImage(context); - - // Unlock the pixel buffer - CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - - // Create an image object from the Quartz image - NSData *data = UIImageJPEGRepresentation([[UIImage alloc] initWithCGImage:quartzImage], 1); - - FlutterStandardTypedData *eventData = [FlutterStandardTypedData typedDataWithBytes:data]; - _byteStreamHandler.eventSink(eventData); - - CGImageRelease(quartzImage); - */ } if (_isRecording) { if (_videoWriter.status == AVAssetWriterStatusFailed) { From 16d1d322b05b94169873000f34517da0e4288fed Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 29 Nov 2018 13:42:18 -0800 Subject: [PATCH 09/34] Pass back metadata for ios image --- packages/camera/ios/Classes/CameraPlugin.m | 38 +++++++++++++++------- 1 file changed, 26 insertions(+), 12 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index f6d2c3ee4d1e..97e9604cce01 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -28,6 +28,7 @@ @interface FLTByteStreamHandler : NSObject @implementation FLTByteStreamHandler { } - (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; return nil; } @@ -210,27 +211,40 @@ - (void)captureOutput:(AVCaptureOutput *)output if (_isStreamingBytes) { if (!_byteStreamHandler.eventSink) return; - // Get a CMSampleBuffer's Core Video image buffer for the media data 
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); - NSLog(@"%zu", planeCount); + size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer); + size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer); - NSMutableData *mutableData = [NSMutableData data]; + NSMutableArray *planes = [NSMutableArray array]; + size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); for (int i = 0; i < planeCount; i++) { - void *planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); - size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); - size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); - size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); + void *planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i); + size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i); + size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i); + size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i); - unsigned long length = bytesPerRow * height; - [mutableData appendBytes:planeAddress length:[NSNumber numberWithUnsignedLong:length].unsignedIntegerValue]; + NSNumber *length = @(bytesPerRow * height); + NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue]; + + NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary]; + planeBuffer[@"bytesPerRow"] = @(bytesPerRow); + planeBuffer[@"width"] = @(width); + planeBuffer[@"height"] = @(height); + planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes]; + + [planes addObject:planeBuffer]; } - FlutterStandardTypedData *eventData = [FlutterStandardTypedData typedDataWithBytes:mutableData]; - _byteStreamHandler.eventSink(eventData); + NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary]; + imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth]; + imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight]; + imageBuffer[@"format"] = @(videoFormat); + imageBuffer[@"planes"] = planes; + + _byteStreamHandler.eventSink(imageBuffer); CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); } From 8e8897892db209e05d4ad92f5e3da2c3c7dc6611 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 29 Nov 2018 14:18:20 -0800 Subject: [PATCH 10/34] Pass back metadata for android image --- .../flutter/plugins/camera/CameraPlugin.java | 43 ++++++++++--------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index 77f0068db92f..16400a6aa4b1 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -824,32 +824,33 @@ public void onImageAvailable(final ImageReader reader) { Image img = reader.acquireLatestImage(); if (img == null) return; - eventSink.success(YUV_420_888toNV21(img)); - img.close(); - } - }, - null); - } - - private byte[] YUV_420_888toNV21(Image image) { - byte[] nv21; + List> planes = new ArrayList<>(); + for (Image.Plane plane : img.getPlanes()) { + ByteBuffer buffer = plane.getBuffer(); - ByteBuffer yBuffer = image.getPlanes()[0].getBuffer(); - ByteBuffer uBuffer = image.getPlanes()[1].getBuffer(); - ByteBuffer vBuffer = 
image.getPlanes()[2].getBuffer(); + byte[] bytes = new byte[buffer.remaining()]; + buffer.get(bytes, 0, bytes.length); - int ySize = yBuffer.remaining(); - int uSize = uBuffer.remaining(); - int vSize = vBuffer.remaining(); + Map planeBuffer = new HashMap<>(); + planeBuffer.put("bytesPerRow", plane.getRowStride()); + planeBuffer.put("width", img.getWidth()); + planeBuffer.put("height", img.getHeight()); + planeBuffer.put("bytes", bytes); - nv21 = new byte[ySize + uSize + vSize]; + planes.add(planeBuffer); + } - //U and V are swapped - yBuffer.get(nv21, 0, ySize); - vBuffer.get(nv21, ySize, vSize); - uBuffer.get(nv21, ySize + vSize, uSize); + Map imageBuffer = new HashMap<>(); + imageBuffer.put("width", img.getWidth()); + imageBuffer.put("height", img.getHeight()); + imageBuffer.put("format", img.getFormat()); + imageBuffer.put("planes", planes); - return nv21; + eventSink.success(imageBuffer); + img.close(); + } + }, + null); } private void sendErrorEvent(String errorDescription) { From 67f83042ed82b2fdb6c283055cdf6d7678072ea5 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 29 Nov 2018 20:47:33 -0800 Subject: [PATCH 11/34] Dart code now parses camera image buffer --- packages/camera/lib/camera.dart | 37 +++++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index e8c6538e61d5..80b85c99708b 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -11,7 +11,34 @@ enum CameraLensDirection { front, back, external } enum ResolutionPreset { low, medium, high } -typedef void OnLatestImageAvailable(Uint8List bytes); +typedef void OnLatestImageAvailable(CameraImage image); + +class Plane { + Plane._fromPlatformData(dynamic data) + : bytes = data['bytes'], + bytesPerRow = data['bytesPerRow'], + height = data['height'], + width = data['width']; + + final Uint8List bytes; + final int bytesPerRow; + final int height; + final int width; +} + +class CameraImage { + CameraImage._fromPlatformData(dynamic data) + : format = data['format'], + height = data['height'], + width = data['width'], + planes = List.unmodifiable(data['planes'] + .map((dynamic planeData) => Plane._fromPlatformData(planeData))); + + final dynamic format; + final int height; + final int width; + final List planes; +} /// Returns the resolution preset as a String. 
String serializeResolutionPreset(ResolutionPreset resolutionPreset) { @@ -317,9 +344,11 @@ class CameraController extends ValueNotifier { const EventChannel cameraEventChannel = EventChannel('plugins.flutter.io/camera/bytes'); _byteStreamSubscription = - cameraEventChannel.receiveBroadcastStream().listen((dynamic bytes) { - onAvailable(bytes); - }); + cameraEventChannel.receiveBroadcastStream().listen( + (dynamic imageData) { + onAvailable(CameraImage._fromPlatformData(imageData)); + }, + ); } Future stopByteStream() async { From 646283a7f7d69f58b8c2b54a81b26082837238ed Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Fri, 30 Nov 2018 21:13:52 +0000 Subject: [PATCH 12/34] YUV image to bgra --- packages/camera/ios/Classes/CameraPlugin.m | 54 +++++++++++++++++++++- 1 file changed, 52 insertions(+), 2 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index 97e9604cce01..58c9ac78d899 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -1,6 +1,7 @@ #import "CameraPlugin.h" #import #import +#import @interface NSError (FlutterError) @property(readonly, nonatomic) FlutterError *flutterError; @@ -115,7 +116,9 @@ - (void)captureToFile:(NSString *)filename result:(FlutterResult)result; @end @implementation FLTCam -FourCharCode const videoFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; +FourCharCode const videoFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; +vImage_Buffer destinationBuffer; +vImage_Buffer conversionBuffer; - (instancetype)initWithCameraName:(NSString *)cameraName resolutionPreset:(NSString *)resolutionPreset @@ -332,7 +335,54 @@ - (CVPixelBufferRef)copyPixelBuffer { while (!OSAtomicCompareAndSwapPtrBarrier(pixelBuffer, nil, (void **)&_latestPixelBuffer)) { pixelBuffer = _latestPixelBuffer; } - return pixelBuffer; + + return [self convertYUVImageTOBGRA:pixelBuffer]; +} + +- (CVPixelBufferRef)convertYUVImageTOBGRA:(CVPixelBufferRef)pixelBuffer { + CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + + vImage_YpCbCrToARGB infoYpCbCrToARGB; + vImage_YpCbCrPixelRange pixelRange; + pixelRange.Yp_bias = 16; + pixelRange.CbCr_bias = 128; + pixelRange.YpRangeMax = 235; + pixelRange.CbCrRangeMax = 240; + pixelRange.YpMax = 235; + pixelRange.YpMin = 16; + pixelRange.CbCrMax = 240; + pixelRange.CbCrMin = 16; + + vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4, &pixelRange, &infoYpCbCrToARGB, kvImage420Yp8_CbCr8, kvImageARGB8888, kvImageNoFlags); + + vImage_Buffer sourceLumaBuffer; + sourceLumaBuffer.data = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); + sourceLumaBuffer.height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); + sourceLumaBuffer.width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);; + sourceLumaBuffer.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); + + vImage_Buffer sourceChromaBuffer; + sourceChromaBuffer.data = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1); + sourceChromaBuffer.height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); + sourceChromaBuffer.width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); + sourceChromaBuffer.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1); + + if(!destinationBuffer.height) vImageBuffer_Init(&destinationBuffer, sourceLumaBuffer.height, sourceLumaBuffer.width, 32, kvImageNoFlags); + + vImageConvert_420Yp8_CbCr8ToARGB8888(&sourceLumaBuffer, &sourceChromaBuffer, &destinationBuffer, &infoYpCbCrToARGB, NULL, 255, 
kvImagePrintDiagnosticsToConsole); + + CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + CVPixelBufferRelease(pixelBuffer); + + if(!conversionBuffer.height) vImageBuffer_Init(&conversionBuffer, sourceLumaBuffer.height, sourceLumaBuffer.width, 32, kvImageNoFlags); + + const uint8_t map[4] = { 3, 2, 1, 0 }; + vImagePermuteChannels_ARGB8888(&destinationBuffer, &conversionBuffer, map, kvImageNoFlags); + + CVPixelBufferRef newPixelBuffer = NULL; + CVPixelBufferCreateWithBytes(NULL, conversionBuffer.width, conversionBuffer.height, kCVPixelFormatType_32BGRA, conversionBuffer.data, conversionBuffer.rowBytes, NULL, NULL, NULL, &newPixelBuffer); + + return newPixelBuffer; } - (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments { From 7e9969166fc6b28310979af6c8dec69e5e2fa1f1 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 6 Dec 2018 20:11:09 +0000 Subject: [PATCH 13/34] Add documentation --- packages/camera/lib/camera.dart | 107 +++++++++++++++++++++++++++++++- 1 file changed, 106 insertions(+), 1 deletion(-) diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index 80b85c99708b..cfd13719da35 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -13,19 +13,93 @@ enum ResolutionPreset { low, medium, high } typedef void OnLatestImageAvailable(CameraImage image); +/// A single color plane of image data. +/// +/// The number and meaning of the planes in an image are determined by the +/// format of the Image. class Plane { Plane._fromPlatformData(dynamic data) : bytes = data['bytes'], + bytesPerPixel = data['bytesPerPixel'], bytesPerRow = data['bytesPerRow'], height = data['height'], width = data['width']; + /// Bytes representing this plane. final Uint8List bytes; + + /// The distance between adjacent pixel samples on Android, in bytes. + /// + /// Will be `null` on iOS. + final int bytesPerPixel; + + /// The row stride for this color plane, in bytes. final int bytesPerRow; + + /// Height of the pixel buffer on iOS. + /// + /// Will be `null` on Android final int height; + + /// Width of the pixel buffer on iOS. + /// + /// Will be `null` on Android. final int width; } +/// Group of image formats that are comparable across Android and iOS platforms. +enum ImageFormatGroup { + /// The image format does not fit into any specific group. + unknown, + + /// Multi-plane YUV 420 format. + /// + /// This format is a generic YCbCr format, capable of describing any 4:2:0 + /// chroma-subsampled planar or semiplanar buffer (but not fully interleaved), + /// with 8 bits per color sample. + /// + /// On Android, this is `android.graphics.ImageFormat.YUV_420_888`. See + /// https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888 + /// + /// On iOS, this is `kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`. See + /// https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers/kcvpixelformattype_420ypcbcr8biplanarvideorange?language=objc + yuv420, +} + +/// Describes how pixels are represented in an image. +class ImageFormat { + ImageFormat._fromPlatformData(this.raw) : group = _asImageFormatGroup(raw); + + /// Describes the format group the raw image format falls into. + final ImageFormatGroup group; + + /// Raw version of the format from the Android or iOS platform. + /// + /// On Android, this is an `int` from class `android.graphics.ImageFormat`. 
See + /// https://developer.android.com/reference/android/graphics/ImageFormat + /// + /// On iOS, this is a `FourCharCode` constant from Pixel Format Identifiers. + /// See https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers?language=objc + final dynamic raw; +} + +ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) { + if (rawFormat == 35 || rawFormat == 'woeifj') { + return ImageFormatGroup.yuv420; + } else { + return ImageFormatGroup.unknown; + } +} + +/// A single complete image buffer from the platform camera. +/// +/// This class allows for direct application access to the pixel data of an +/// Image through one or more [Uint8List]. Each buffer is encapsulated in a +/// [Plane] that describes the layout of the pixel data in that plane. The +/// [CameraImage] is not directly usable as a UI resource. +/// +/// Although not all image formats are planar on iOS, we treat 1-dimensional +/// images as single planar images. class CameraImage { CameraImage._fromPlatformData(dynamic data) : format = data['format'], @@ -34,9 +108,27 @@ class CameraImage { planes = List.unmodifiable(data['planes'] .map((dynamic planeData) => Plane._fromPlatformData(planeData))); - final dynamic format; + /// Format of the image provided. + /// + /// Determines the number of planes needed to represent the image, and + /// the general layout of the pixel data in each [Uint8List]. + final ImageFormat format; + + /// Height of the image in pixels. + /// + /// For formats where some color channels are subsampled, this is the height + /// of the largest-resolution plane. final int height; + + /// Width of the image in pixels. + /// + /// For formats where some color channels are subsampled, this is the width + /// of the largest-resolution plane. final int width; + + /// The pixels planes for this image. + /// + /// The number of planes is determined by the format of the image. final List planes; } @@ -315,6 +407,15 @@ class CameraController extends ValueNotifier { } } + /// Start a streaming bytes from platform camera. + /// + /// The stream will always use the latest image and discard the others. + /// + /// [onAvailable] is a method that takes a [CameraImage] and + /// returns `void`. + /// + /// Throws a [CameraException] if byte streaming or video recording has + /// already started. Future startByteStream(OnLatestImageAvailable onAvailable) async { if (!value.isInitialized || _isDisposed) { throw CameraException( @@ -351,6 +452,10 @@ class CameraController extends ValueNotifier { ); } + /// Stop the stream of bytes from the camera. + /// + /// Throws a [CameraException] if byte streaming was not started or video + /// recording was started. 
Future stopByteStream() async { if (!value.isInitialized || _isDisposed) { throw CameraException( From 4cbfab9e8d2bfcb0632ed59d6546edd8b434bac2 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 6 Dec 2018 20:12:31 +0000 Subject: [PATCH 14/34] Only pass available data on Android --- .../src/main/java/io/flutter/plugins/camera/CameraPlugin.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index 16400a6aa4b1..c859a3759fd2 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -833,8 +833,7 @@ public void onImageAvailable(final ImageReader reader) { Map planeBuffer = new HashMap<>(); planeBuffer.put("bytesPerRow", plane.getRowStride()); - planeBuffer.put("width", img.getWidth()); - planeBuffer.put("height", img.getHeight()); + planeBuffer.put("bytesPerPixel", plane.getPixelStride()); planeBuffer.put("bytes", bytes); planes.add(planeBuffer); From 297fe7a8182e94cec336105d43cc51b1c748cde6 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 6 Dec 2018 22:10:12 +0000 Subject: [PATCH 15/34] Bump version --- packages/camera/CHANGELOG.md | 4 ++++ packages/camera/pubspec.yaml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/camera/CHANGELOG.md b/packages/camera/CHANGELOG.md index 1ea41de34af9..72bc7e584607 100644 --- a/packages/camera/CHANGELOG.md +++ b/packages/camera/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.2.7 + +* Add byte streaming capability for the camera + ## 0.2.6 * Update the camera to use the physical device's orientation instead of the UI diff --git a/packages/camera/pubspec.yaml b/packages/camera/pubspec.yaml index 9a7e109f2864..62e0f7212346 100644 --- a/packages/camera/pubspec.yaml +++ b/packages/camera/pubspec.yaml @@ -1,7 +1,7 @@ name: camera description: A Flutter plugin for getting information about and controlling the camera on Android and iOS. Supports previewing the camera feed and capturing images. 
-version: 0.2.6 +version: 0.2.7 authors: - Flutter Team - Luigi Agosti From bdd900799e0c4eee81bb49af1e0a00db0367239c Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 6 Dec 2018 22:16:08 +0000 Subject: [PATCH 16/34] Formatting --- packages/camera/ios/Classes/CameraPlugin.m | 32 +++++++++++++++------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index c38bd8cac341..227a4f5e534c 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -1,7 +1,7 @@ #import "CameraPlugin.h" #import -#import #import +#import @interface NSError (FlutterError) @property(readonly, nonatomic) FlutterError *flutterError; @@ -22,7 +22,7 @@ @interface FLTSavePhotoDelegate : NSObject - initWithPath:(NSString *)filename result:(FlutterResult)result; @end -@interface FLTByteStreamHandler : NSObject +@interface FLTByteStreamHandler : NSObject @property(readonly, nonatomic) FlutterEventSink eventSink; @end @@ -153,7 +153,7 @@ - (instancetype)initWithCameraName:(NSString *)cameraName _captureVideoOutput = [AVCaptureVideoDataOutput new]; _captureVideoOutput.videoSettings = - @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat) }; + @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)}; [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES]; [_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()]; @@ -355,12 +355,14 @@ - (CVPixelBufferRef)convertYUVImageTOBGRA:(CVPixelBufferRef)pixelBuffer { pixelRange.CbCrMax = 240; pixelRange.CbCrMin = 16; - vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4, &pixelRange, &infoYpCbCrToARGB, kvImage420Yp8_CbCr8, kvImageARGB8888, kvImageNoFlags); + vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4, &pixelRange, + &infoYpCbCrToARGB, kvImage420Yp8_CbCr8, + kvImageARGB8888, kvImageNoFlags); vImage_Buffer sourceLumaBuffer; sourceLumaBuffer.data = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); sourceLumaBuffer.height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); - sourceLumaBuffer.width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);; + sourceLumaBuffer.width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); sourceLumaBuffer.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); vImage_Buffer sourceChromaBuffer; @@ -369,20 +371,30 @@ - (CVPixelBufferRef)convertYUVImageTOBGRA:(CVPixelBufferRef)pixelBuffer { sourceChromaBuffer.width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); sourceChromaBuffer.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1); - if(!destinationBuffer.height) vImageBuffer_Init(&destinationBuffer, sourceLumaBuffer.height, sourceLumaBuffer.width, 32, kvImageNoFlags); + if (!destinationBuffer.height) { + vImageBuffer_Init(&destinationBuffer, sourceLumaBuffer.height, sourceLumaBuffer.width, 32, + kvImageNoFlags); + } - vImageConvert_420Yp8_CbCr8ToARGB8888(&sourceLumaBuffer, &sourceChromaBuffer, &destinationBuffer, &infoYpCbCrToARGB, NULL, 255, kvImagePrintDiagnosticsToConsole); + vImageConvert_420Yp8_CbCr8ToARGB8888(&sourceLumaBuffer, &sourceChromaBuffer, &destinationBuffer, + &infoYpCbCrToARGB, NULL, 255, + kvImagePrintDiagnosticsToConsole); CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); CVPixelBufferRelease(pixelBuffer); - if(!conversionBuffer.height) vImageBuffer_Init(&conversionBuffer, sourceLumaBuffer.height, sourceLumaBuffer.width, 32, 
kvImageNoFlags); + if (!conversionBuffer.height) { + vImageBuffer_Init(&conversionBuffer, sourceLumaBuffer.height, sourceLumaBuffer.width, 32, + kvImageNoFlags); + } - const uint8_t map[4] = { 3, 2, 1, 0 }; + const uint8_t map[4] = {3, 2, 1, 0}; vImagePermuteChannels_ARGB8888(&destinationBuffer, &conversionBuffer, map, kvImageNoFlags); CVPixelBufferRef newPixelBuffer = NULL; - CVPixelBufferCreateWithBytes(NULL, conversionBuffer.width, conversionBuffer.height, kCVPixelFormatType_32BGRA, conversionBuffer.data, conversionBuffer.rowBytes, NULL, NULL, NULL, &newPixelBuffer); + CVPixelBufferCreateWithBytes(NULL, conversionBuffer.width, conversionBuffer.height, + kCVPixelFormatType_32BGRA, conversionBuffer.data, + conversionBuffer.rowBytes, NULL, NULL, NULL, &newPixelBuffer); return newPixelBuffer; } From ccc057be0d304de6e88490fceeee9a9cc2a966ff Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Fri, 7 Dec 2018 18:38:45 +0000 Subject: [PATCH 17/34] create imageformat error --- packages/camera/lib/camera.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index cfd13719da35..ae15b39d0f92 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -102,7 +102,7 @@ ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) { /// images as single planar images. class CameraImage { CameraImage._fromPlatformData(dynamic data) - : format = data['format'], + : format = ImageFormat._fromPlatformData(data['format']), height = data['height'], width = data['width'], planes = List.unmodifiable(data['planes'] From 989edf61b938c4bba7b91b5396dfe1e94e577cba Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Fri, 7 Dec 2018 19:06:26 +0000 Subject: [PATCH 18/34] Don't return from null --- packages/camera/ios/Classes/CameraPlugin.m | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index 227a4f5e534c..9f2b5b12e385 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -213,9 +213,7 @@ - (void)captureOutput:(AVCaptureOutput *)output }); return; } - if (_isStreamingBytes) { - if (!_byteStreamHandler.eventSink) return; - + if (_isStreamingBytes && _byteStreamHandler.eventSink) { CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); From b1d7b89a1b6399d54339030089ed095069383429 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Mon, 10 Dec 2018 15:37:00 -0800 Subject: [PATCH 19/34] Init buffers in constructor --- packages/camera/CHANGELOG.md | 2 +- packages/camera/ios/Classes/CameraPlugin.m | 91 ++++++++++------------ 2 files changed, 44 insertions(+), 49 deletions(-) diff --git a/packages/camera/CHANGELOG.md b/packages/camera/CHANGELOG.md index 72bc7e584607..c127c90a38bb 100644 --- a/packages/camera/CHANGELOG.md +++ b/packages/camera/CHANGELOG.md @@ -1,6 +1,6 @@ ## 0.2.7 -* Add byte streaming capability for the camera +* Add byte streaming capability for the camera. 
## 0.2.6 diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index 9f2b5b12e385..6c82af8c54b9 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -23,7 +23,7 @@ @interface FLTSavePhotoDelegate : NSObject @end @interface FLTByteStreamHandler : NSObject -@property(readonly, nonatomic) FlutterEventSink eventSink; +@property FlutterEventSink eventSink; @end @implementation FLTByteStreamHandler { @@ -104,6 +104,8 @@ @interface FLTCam : NSObject Date: Mon, 10 Dec 2018 17:02:46 -0800 Subject: [PATCH 20/34] Add yuv ios format --- packages/camera/lib/camera.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index ae15b39d0f92..0a47fe009977 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -84,7 +84,7 @@ class ImageFormat { } ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) { - if (rawFormat == 35 || rawFormat == 'woeifj') { + if (rawFormat == 35 || rawFormat == 875704438) { return ImageFormatGroup.yuv420; } else { return ImageFormatGroup.unknown; From 0349ae20f9cd3b0a67a1e1eaa117d39e42a0f9ec Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Tue, 11 Dec 2018 12:21:55 -0800 Subject: [PATCH 21/34] Used presets with defined resolution. Sometimes resolution would come back wrong. Also init constructors with resolutions --- packages/camera/ios/Classes/CameraPlugin.m | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index 6c82af8c54b9..c20e75f7645a 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -130,13 +130,16 @@ - (instancetype)initWithCameraName:(NSString *)cameraName _captureSession = [[AVCaptureSession alloc] init]; AVCaptureSessionPreset preset; if ([resolutionPreset isEqualToString:@"high"]) { - preset = AVCaptureSessionPresetHigh; + preset = AVCaptureSessionPreset1280x720; + _previewSize = CGSizeMake(1280, 720); } else if ([resolutionPreset isEqualToString:@"medium"]) { - preset = AVCaptureSessionPresetMedium; + preset = AVCaptureSessionPreset640x480; + _previewSize = CGSizeMake(640, 480); } else { NSAssert([resolutionPreset isEqualToString:@"low"], @"Unknown resolution preset %@", resolutionPreset); - preset = AVCaptureSessionPresetLow; + preset = AVCaptureSessionPreset352x288; + _previewSize = CGSizeMake(352, 288); } _captureSession.sessionPreset = preset; _captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName]; @@ -147,12 +150,9 @@ - (instancetype)initWithCameraName:(NSString *)cameraName *error = localError; return nil; } - CMVideoDimensions dimensions = - CMVideoFormatDescriptionGetDimensions([[_captureDevice activeFormat] formatDescription]); - _previewSize = CGSizeMake(dimensions.width, dimensions.height); - vImageBuffer_Init(&_destinationBuffer, 1280, 720, 32, kvImageNoFlags); - vImageBuffer_Init(&_conversionBuffer, 1280, 720, 32, kvImageNoFlags); + vImageBuffer_Init(&_destinationBuffer, _previewSize.width, _previewSize.height, 32, kvImageNoFlags); + vImageBuffer_Init(&_conversionBuffer, _previewSize.width, _previewSize.height, 32, kvImageNoFlags); _captureVideoOutput = [AVCaptureVideoDataOutput new]; _captureVideoOutput.videoSettings = From 2633c4975fdc961d9ac77a5da0b55261120799ba Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 13 Dec 2018 11:28:42 -0800 
Subject: [PATCH 22/34] Formatting --- packages/camera/ios/Classes/CameraPlugin.m | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index c20e75f7645a..c3e67ca30842 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -26,8 +26,8 @@ @interface FLTByteStreamHandler : NSObject @property FlutterEventSink eventSink; @end -@implementation FLTByteStreamHandler { -} +@implementation FLTByteStreamHandler + - (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments { _eventSink = nil; return nil; @@ -151,8 +151,10 @@ - (instancetype)initWithCameraName:(NSString *)cameraName return nil; } - vImageBuffer_Init(&_destinationBuffer, _previewSize.width, _previewSize.height, 32, kvImageNoFlags); - vImageBuffer_Init(&_conversionBuffer, _previewSize.width, _previewSize.height, 32, kvImageNoFlags); + vImageBuffer_Init(&_destinationBuffer, _previewSize.width, _previewSize.height, 32, + kvImageNoFlags); + vImageBuffer_Init(&_conversionBuffer, _previewSize.width, _previewSize.height, 32, + kvImageNoFlags); _captureVideoOutput = [AVCaptureVideoDataOutput new]; _captureVideoOutput.videoSettings = From 6377c641a59cc7b9fccabd1fad57fceb0a408ac6 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 13 Dec 2018 11:29:36 -0800 Subject: [PATCH 23/34] Move CameraImage classes to separate file --- packages/camera/lib/camera.dart | 119 --------------------- packages/camera/lib/src/camera_image.dart | 122 ++++++++++++++++++++++ 2 files changed, 122 insertions(+), 119 deletions(-) create mode 100644 packages/camera/lib/src/camera_image.dart diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index 0a47fe009977..3dd3dce1f370 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -13,125 +13,6 @@ enum ResolutionPreset { low, medium, high } typedef void OnLatestImageAvailable(CameraImage image); -/// A single color plane of image data. -/// -/// The number and meaning of the planes in an image are determined by the -/// format of the Image. -class Plane { - Plane._fromPlatformData(dynamic data) - : bytes = data['bytes'], - bytesPerPixel = data['bytesPerPixel'], - bytesPerRow = data['bytesPerRow'], - height = data['height'], - width = data['width']; - - /// Bytes representing this plane. - final Uint8List bytes; - - /// The distance between adjacent pixel samples on Android, in bytes. - /// - /// Will be `null` on iOS. - final int bytesPerPixel; - - /// The row stride for this color plane, in bytes. - final int bytesPerRow; - - /// Height of the pixel buffer on iOS. - /// - /// Will be `null` on Android - final int height; - - /// Width of the pixel buffer on iOS. - /// - /// Will be `null` on Android. - final int width; -} - -/// Group of image formats that are comparable across Android and iOS platforms. -enum ImageFormatGroup { - /// The image format does not fit into any specific group. - unknown, - - /// Multi-plane YUV 420 format. - /// - /// This format is a generic YCbCr format, capable of describing any 4:2:0 - /// chroma-subsampled planar or semiplanar buffer (but not fully interleaved), - /// with 8 bits per color sample. - /// - /// On Android, this is `android.graphics.ImageFormat.YUV_420_888`. See - /// https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888 - /// - /// On iOS, this is `kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`. 
See - /// https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers/kcvpixelformattype_420ypcbcr8biplanarvideorange?language=objc - yuv420, -} - -/// Describes how pixels are represented in an image. -class ImageFormat { - ImageFormat._fromPlatformData(this.raw) : group = _asImageFormatGroup(raw); - - /// Describes the format group the raw image format falls into. - final ImageFormatGroup group; - - /// Raw version of the format from the Android or iOS platform. - /// - /// On Android, this is an `int` from class `android.graphics.ImageFormat`. See - /// https://developer.android.com/reference/android/graphics/ImageFormat - /// - /// On iOS, this is a `FourCharCode` constant from Pixel Format Identifiers. - /// See https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers?language=objc - final dynamic raw; -} - -ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) { - if (rawFormat == 35 || rawFormat == 875704438) { - return ImageFormatGroup.yuv420; - } else { - return ImageFormatGroup.unknown; - } -} - -/// A single complete image buffer from the platform camera. -/// -/// This class allows for direct application access to the pixel data of an -/// Image through one or more [Uint8List]. Each buffer is encapsulated in a -/// [Plane] that describes the layout of the pixel data in that plane. The -/// [CameraImage] is not directly usable as a UI resource. -/// -/// Although not all image formats are planar on iOS, we treat 1-dimensional -/// images as single planar images. -class CameraImage { - CameraImage._fromPlatformData(dynamic data) - : format = ImageFormat._fromPlatformData(data['format']), - height = data['height'], - width = data['width'], - planes = List.unmodifiable(data['planes'] - .map((dynamic planeData) => Plane._fromPlatformData(planeData))); - - /// Format of the image provided. - /// - /// Determines the number of planes needed to represent the image, and - /// the general layout of the pixel data in each [Uint8List]. - final ImageFormat format; - - /// Height of the image in pixels. - /// - /// For formats where some color channels are subsampled, this is the height - /// of the largest-resolution plane. - final int height; - - /// Width of the image in pixels. - /// - /// For formats where some color channels are subsampled, this is the width - /// of the largest-resolution plane. - final int width; - - /// The pixels planes for this image. - /// - /// The number of planes is determined by the format of the image. - final List planes; -} - /// Returns the resolution preset as a String. String serializeResolutionPreset(ResolutionPreset resolutionPreset) { switch (resolutionPreset) { diff --git a/packages/camera/lib/src/camera_image.dart b/packages/camera/lib/src/camera_image.dart new file mode 100644 index 000000000000..6c960b1eae17 --- /dev/null +++ b/packages/camera/lib/src/camera_image.dart @@ -0,0 +1,122 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +/// A single color plane of image data. +/// +/// The number and meaning of the planes in an image are determined by the +/// format of the Image. +class Plane { + Plane._fromPlatformData(dynamic data) + : bytes = data['bytes'], + bytesPerPixel = data['bytesPerPixel'], + bytesPerRow = data['bytesPerRow'], + height = data['height'], + width = data['width']; + + /// Bytes representing this plane. 
+ final Uint8List bytes; + + /// The distance between adjacent pixel samples on Android, in bytes. + /// + /// Will be `null` on iOS. + final int bytesPerPixel; + + /// The row stride for this color plane, in bytes. + final int bytesPerRow; + + /// Height of the pixel buffer on iOS. + /// + /// Will be `null` on Android + final int height; + + /// Width of the pixel buffer on iOS. + /// + /// Will be `null` on Android. + final int width; +} + +/// Group of image formats that are comparable across Android and iOS platforms. +enum ImageFormatGroup { + /// The image format does not fit into any specific group. + unknown, + + /// Multi-plane YUV 420 format. + /// + /// This format is a generic YCbCr format, capable of describing any 4:2:0 + /// chroma-subsampled planar or semiplanar buffer (but not fully interleaved), + /// with 8 bits per color sample. + /// + /// On Android, this is `android.graphics.ImageFormat.YUV_420_888`. See + /// https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888 + /// + /// On iOS, this is `kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`. See + /// https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers/kcvpixelformattype_420ypcbcr8biplanarvideorange?language=objc + yuv420, +} + +/// Describes how pixels are represented in an image. +class ImageFormat { + ImageFormat._fromPlatformData(this.raw) : group = _asImageFormatGroup(raw); + + /// Describes the format group the raw image format falls into. + final ImageFormatGroup group; + + /// Raw version of the format from the Android or iOS platform. + /// + /// On Android, this is an `int` from class `android.graphics.ImageFormat`. See + /// https://developer.android.com/reference/android/graphics/ImageFormat + /// + /// On iOS, this is a `FourCharCode` constant from Pixel Format Identifiers. + /// See https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers?language=objc + final dynamic raw; +} + +ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) { + if (rawFormat == 35 || rawFormat == 875704438) { + return ImageFormatGroup.yuv420; + } else { + return ImageFormatGroup.unknown; + } +} + +/// A single complete image buffer from the platform camera. +/// +/// This class allows for direct application access to the pixel data of an +/// Image through one or more [Uint8List]. Each buffer is encapsulated in a +/// [Plane] that describes the layout of the pixel data in that plane. The +/// [CameraImage] is not directly usable as a UI resource. +/// +/// Although not all image formats are planar on iOS, we treat 1-dimensional +/// images as single planar images. +class CameraImage { + CameraImage._fromPlatformData(dynamic data) + : format = ImageFormat._fromPlatformData(data['format']), + height = data['height'], + width = data['width'], + planes = List.unmodifiable(data['planes'] + .map((dynamic planeData) => Plane._fromPlatformData(planeData))); + + /// Format of the image provided. + /// + /// Determines the number of planes needed to represent the image, and + /// the general layout of the pixel data in each [Uint8List]. + final ImageFormat format; + + /// Height of the image in pixels. + /// + /// For formats where some color channels are subsampled, this is the height + /// of the largest-resolution plane. + final int height; + + /// Width of the image in pixels. + /// + /// For formats where some color channels are subsampled, this is the width + /// of the largest-resolution plane. 
+ final int width; + + /// The pixels planes for this image. + /// + /// The number of planes is determined by the format of the image. + final List planes; +} \ No newline at end of file From 6660a68cd2335395a6a6769c4899fb25d1f60d50 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 13 Dec 2018 11:31:57 -0800 Subject: [PATCH 24/34] Move camera.dart to src folder --- packages/camera/lib/{ => src}/camera.dart | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename packages/camera/lib/{ => src}/camera.dart (100%) diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/src/camera.dart similarity index 100% rename from packages/camera/lib/camera.dart rename to packages/camera/lib/src/camera.dart From a4d278e93f7616365e9d2b935407f4270e74d9d7 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Thu, 13 Dec 2018 11:35:33 -0800 Subject: [PATCH 25/34] Create camera library --- packages/camera/lib/camera.dart | 14 ++++++++++++++ packages/camera/lib/src/camera.dart | 8 ++++---- packages/camera/lib/src/camera_image.dart | 6 ++++-- 3 files changed, 22 insertions(+), 6 deletions(-) create mode 100644 packages/camera/lib/camera.dart diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart new file mode 100644 index 000000000000..8309b0414212 --- /dev/null +++ b/packages/camera/lib/camera.dart @@ -0,0 +1,14 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +library camera; + +import 'dart:async'; +import 'dart:typed_data'; + +import 'package:flutter/services.dart'; +import 'package:flutter/widgets.dart'; + +part 'src/camera.dart'; +part 'src/camera_image.dart'; diff --git a/packages/camera/lib/src/camera.dart b/packages/camera/lib/src/camera.dart index 3dd3dce1f370..cc046e8b5f94 100644 --- a/packages/camera/lib/src/camera.dart +++ b/packages/camera/lib/src/camera.dart @@ -1,8 +1,8 @@ -import 'dart:async'; -import 'dart:typed_data'; +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. -import 'package:flutter/services.dart'; -import 'package:flutter/widgets.dart'; +part of camera; final MethodChannel _channel = const MethodChannel('plugins.flutter.io/camera') ..invokeMethod('init'); diff --git a/packages/camera/lib/src/camera_image.dart b/packages/camera/lib/src/camera_image.dart index 6c960b1eae17..11a1fbea73d4 100644 --- a/packages/camera/lib/src/camera_image.dart +++ b/packages/camera/lib/src/camera_image.dart @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +part of camera; + /// A single color plane of image data. /// /// The number and meaning of the planes in an image are determined by the @@ -95,7 +97,7 @@ class CameraImage { height = data['height'], width = data['width'], planes = List.unmodifiable(data['planes'] - .map((dynamic planeData) => Plane._fromPlatformData(planeData))); + .map((dynamic planeData) => Plane._fromPlatformData(planeData))); /// Format of the image provided. /// @@ -119,4 +121,4 @@ class CameraImage { /// /// The number of planes is determined by the format of the image. 
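  /// For the [ImageFormatGroup.yuv420] group this typically means three planes
  /// (Y, U, V) from Android's YUV_420_888 and two planes (Y plus interleaved
  /// CbCr) from the bi-planar 420v format used on iOS.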
final List planes; -} \ No newline at end of file +} From 045dd532eddcd69120407e2cc77e1ad8397f3f1b Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 19 Dec 2018 14:09:22 -0800 Subject: [PATCH 26/34] Better name and comments --- .../flutter/plugins/camera/CameraPlugin.java | 20 ++++++++++--------- packages/camera/ios/Classes/CameraPlugin.m | 4 ++++ 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index 714e8b821c73..dc3b7dd1b882 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -279,7 +279,7 @@ private class Camera { private CameraCaptureSession cameraCaptureSession; private EventChannel.EventSink eventSink; private ImageReader pictureImageReader; - private ImageReader byteImageReader; // Used to pass bytes to dart side. + private ImageReader imageStreamReader; private int sensorOrientation; private boolean isFrontFacing; private String cameraName; @@ -482,7 +482,9 @@ private void open(@Nullable final Result result) { pictureImageReader = ImageReader.newInstance( captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 2); - byteImageReader = + + // Used to steam image byte data to dart side. + imageStreamReader = ImageReader.newInstance( previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2); @@ -767,8 +769,8 @@ private void startPreviewWithByteStream() throws CameraAccessException { surfaces.add(previewSurface); captureRequestBuilder.addTarget(previewSurface); - surfaces.add(byteImageReader.getSurface()); - captureRequestBuilder.addTarget(byteImageReader.getSurface()); + surfaces.add(imageStreamReader.getSurface()); + captureRequestBuilder.addTarget(imageStreamReader.getSurface()); cameraDevice.createCaptureSession( surfaces, @@ -812,13 +814,13 @@ public void onListen(Object o, EventChannel.EventSink eventSink) { @Override public void onCancel(Object o) { - byteImageReader.setOnImageAvailableListener(null, null); + imageStreamReader.setOnImageAvailableListener(null, null); } }); } private void setByteStreamImageAvailableListener(final EventChannel.EventSink eventSink) { - byteImageReader.setOnImageAvailableListener( + imageStreamReader.setOnImageAvailableListener( new ImageReader.OnImageAvailableListener() { @Override public void onImageAvailable(final ImageReader reader) { @@ -880,9 +882,9 @@ private void close() { pictureImageReader.close(); pictureImageReader = null; } - if (byteImageReader != null) { - byteImageReader.close(); - byteImageReader = null; + if (imageStreamReader != null) { + imageStreamReader.close(); + imageStreamReader = null; } if (mediaRecorder != null) { mediaRecorder.reset(); diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index c3e67ca30842..188e08376956 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -120,6 +120,8 @@ - (void)captureToFile:(NSString *)filename result:(FlutterResult)result; @end @implementation FLTCam +// Yuv420 format used for iOS 10+, which is minimum requirement for this plugin. +// Format is used to stream image byte data to dart. 
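// For reference: as a FourCharCode, '420v' (this constant) is the integer
// 0x34323076 == 875704438, which is the raw value the Dart side matches in
// _asImageFormatGroup alongside Android's ImageFormat.YUV_420_888 (35).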
FourCharCode const videoFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; - (instancetype)initWithCameraName:(NSString *)cameraName @@ -346,6 +348,8 @@ - (CVPixelBufferRef)copyPixelBuffer { return [self convertYUVImageToBGRA:pixelBuffer]; } +// Since video format was changed to kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange we have to convert +// image to a usable format for flutter textures. Which is kCVPixelFormatType_32BGRA. - (CVPixelBufferRef)convertYUVImageToBGRA:(CVPixelBufferRef)pixelBuffer { CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); From 01520fed1fa77b1ec606bd329ddf87297a6972f3 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 19 Dec 2018 14:59:16 -0800 Subject: [PATCH 27/34] Change from library camera file --- packages/camera/lib/camera.dart | 454 +++++++++++++++++- .../camera/lib/{src => }/camera_image.dart | 2 +- packages/camera/lib/src/camera.dart | 454 ------------------ 3 files changed, 451 insertions(+), 459 deletions(-) rename packages/camera/lib/{src => }/camera_image.dart (99%) delete mode 100644 packages/camera/lib/src/camera.dart diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index 8309b0414212..d582bde194ab 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -2,13 +2,459 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -library camera; - import 'dart:async'; import 'dart:typed_data'; import 'package:flutter/services.dart'; import 'package:flutter/widgets.dart'; -part 'src/camera.dart'; -part 'src/camera_image.dart'; +part 'camera_image.dart'; + +final MethodChannel _channel = const MethodChannel('plugins.flutter.io/camera') + ..invokeMethod('init'); + +enum CameraLensDirection { front, back, external } + +enum ResolutionPreset { low, medium, high } + +typedef void OnLatestImageAvailable(CameraImage image); + +/// Returns the resolution preset as a String. +String serializeResolutionPreset(ResolutionPreset resolutionPreset) { + switch (resolutionPreset) { + case ResolutionPreset.high: + return 'high'; + case ResolutionPreset.medium: + return 'medium'; + case ResolutionPreset.low: + return 'low'; + } + throw ArgumentError('Unknown ResolutionPreset value'); +} + +CameraLensDirection _parseCameraLensDirection(String string) { + switch (string) { + case 'front': + return CameraLensDirection.front; + case 'back': + return CameraLensDirection.back; + case 'external': + return CameraLensDirection.external; + } + throw ArgumentError('Unknown CameraLensDirection value'); +} + +/// Completes with a list of available cameras. +/// +/// May throw a [CameraException]. 
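/// A minimal usage sketch (the local variable names are only illustrative):
///
///     final cameras = await availableCameras();
///     final controller =
///         CameraController(cameras.first, ResolutionPreset.medium);
///     await controller.initialize();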
+Future> availableCameras() async { + try { + final List cameras = + await _channel.invokeMethod('availableCameras'); + return cameras.map((dynamic camera) { + return CameraDescription( + name: camera['name'], + lensDirection: _parseCameraLensDirection(camera['lensFacing']), + ); + }).toList(); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } +} + +class CameraDescription { + CameraDescription({this.name, this.lensDirection}); + + final String name; + final CameraLensDirection lensDirection; + + @override + bool operator ==(Object o) { + return o is CameraDescription && + o.name == name && + o.lensDirection == lensDirection; + } + + @override + int get hashCode { + return hashValues(name, lensDirection); + } + + @override + String toString() { + return '$runtimeType($name, $lensDirection)'; + } +} + +/// This is thrown when the plugin reports an error. +class CameraException implements Exception { + CameraException(this.code, this.description); + + String code; + String description; + + @override + String toString() => '$runtimeType($code, $description)'; +} + +// Build the UI texture view of the video data with textureId. +class CameraPreview extends StatelessWidget { + const CameraPreview(this.controller); + + final CameraController controller; + + @override + Widget build(BuildContext context) { + return controller.value.isInitialized + ? Texture(textureId: controller._textureId) + : Container(); + } +} + +/// The state of a [CameraController]. +class CameraValue { + const CameraValue({ + this.isInitialized, + this.errorDescription, + this.previewSize, + this.isRecordingVideo, + this.isTakingPicture, + this.isStreamingBytes, + }); + + const CameraValue.uninitialized() + : this( + isInitialized: false, + isRecordingVideo: false, + isTakingPicture: false, + isStreamingBytes: false); + + /// True after [CameraController.initialize] has completed successfully. + final bool isInitialized; + + /// True when a picture capture request has been sent but as not yet returned. + final bool isTakingPicture; + + /// True when the camera is recording (not the same as previewing). + final bool isRecordingVideo; + + /// True when bytes from the camera are being streamed. + final bool isStreamingBytes; + + final String errorDescription; + + /// The size of the preview in pixels. + /// + /// Is `null` until [isInitialized] is `true`. + final Size previewSize; + + /// Convenience getter for `previewSize.height / previewSize.width`. + /// + /// Can only be called when [initialize] is done. + double get aspectRatio => previewSize.height / previewSize.width; + + bool get hasError => errorDescription != null; + + CameraValue copyWith({ + bool isInitialized, + bool isRecordingVideo, + bool isTakingPicture, + bool isStreamingBytes, + String errorDescription, + Size previewSize, + }) { + return CameraValue( + isInitialized: isInitialized ?? this.isInitialized, + errorDescription: errorDescription, + previewSize: previewSize ?? this.previewSize, + isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo, + isTakingPicture: isTakingPicture ?? this.isTakingPicture, + isStreamingBytes: isStreamingBytes ?? this.isStreamingBytes, + ); + } + + @override + String toString() { + return '$runtimeType(' + 'isRecordingVideo: $isRecordingVideo, ' + 'isRecordingVideo: $isRecordingVideo, ' + 'isInitialized: $isInitialized, ' + 'errorDescription: $errorDescription, ' + 'previewSize: $previewSize, ' + 'isStreamingBytes: $isStreamingBytes)'; + } +} + +/// Controls a device camera. 
+/// +/// Use [availableCameras] to get a list of available cameras. +/// +/// Before using a [CameraController] a call to [initialize] must complete. +/// +/// To show the camera preview on the screen use a [CameraPreview] widget. +class CameraController extends ValueNotifier { + CameraController(this.description, this.resolutionPreset) + : super(const CameraValue.uninitialized()); + + final CameraDescription description; + final ResolutionPreset resolutionPreset; + + int _textureId; + bool _isDisposed = false; + StreamSubscription _eventSubscription; + StreamSubscription _byteStreamSubscription; + Completer _creatingCompleter; + + /// Initializes the camera on the device. + /// + /// Throws a [CameraException] if the initialization fails. + Future initialize() async { + if (_isDisposed) { + return Future.value(); + } + try { + _creatingCompleter = Completer(); + final Map reply = await _channel.invokeMethod( + 'initialize', + { + 'cameraName': description.name, + 'resolutionPreset': serializeResolutionPreset(resolutionPreset), + }, + ); + _textureId = reply['textureId']; + value = value.copyWith( + isInitialized: true, + previewSize: Size( + reply['previewWidth'].toDouble(), + reply['previewHeight'].toDouble(), + ), + ); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + _eventSubscription = + EventChannel('flutter.io/cameraPlugin/cameraEvents$_textureId') + .receiveBroadcastStream() + .listen(_listener); + _creatingCompleter.complete(); + return _creatingCompleter.future; + } + + /// Listen to events from the native plugins. + /// + /// A "cameraClosing" event is sent when the camera is closed automatically by the system (for example when the app go to background). The plugin will try to reopen the camera automatically but any ongoing recording will end. + void _listener(dynamic event) { + final Map map = event; + if (_isDisposed) { + return; + } + + switch (map['eventType']) { + case 'error': + value = value.copyWith(errorDescription: event['errorDescription']); + break; + case 'cameraClosing': + value = value.copyWith(isRecordingVideo: false); + break; + } + } + + /// Captures an image and saves it to [path]. + /// + /// A path can for example be obtained using + /// [path_provider](https://pub.dartlang.org/packages/path_provider). + /// + /// If a file already exists at the provided path an error will be thrown. + /// The file can be read as this function returns. + /// + /// Throws a [CameraException] if the capture fails. + Future takePicture(String path) async { + if (!value.isInitialized || _isDisposed) { + throw CameraException( + 'Uninitialized CameraController.', + 'takePicture was called on uninitialized CameraController', + ); + } + if (value.isTakingPicture) { + throw CameraException( + 'Previous capture has not returned yet.', + 'takePicture was called before the previous capture returned.', + ); + } + try { + value = value.copyWith(isTakingPicture: true); + await _channel.invokeMethod( + 'takePicture', + {'textureId': _textureId, 'path': path}, + ); + value = value.copyWith(isTakingPicture: false); + } on PlatformException catch (e) { + value = value.copyWith(isTakingPicture: false); + throw CameraException(e.code, e.message); + } + } + + /// Start a streaming bytes from platform camera. + /// + /// The stream will always use the latest image and discard the others. + /// + /// [onAvailable] is a method that takes a [CameraImage] and + /// returns `void`. 
+ /// + /// Throws a [CameraException] if byte streaming or video recording has + /// already started. + Future startByteStream(OnLatestImageAvailable onAvailable) async { + if (!value.isInitialized || _isDisposed) { + throw CameraException( + 'Uninitialized CameraController', + 'startByteStream was called on uninitialized CameraController.', + ); + } + if (value.isRecordingVideo) { + throw CameraException( + 'A video recording is already started.', + 'startByteStream was called while a video is being recorded.', + ); + } + if (value.isStreamingBytes) { + throw CameraException( + 'A camera has started streaming bytes.', + 'startByteStream was called while a camera was streaming bytes.', + ); + } + + try { + await _channel.invokeMethod('startByteStream'); + value = value.copyWith(isStreamingBytes: true); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + const EventChannel cameraEventChannel = + EventChannel('plugins.flutter.io/camera/bytes'); + _byteStreamSubscription = + cameraEventChannel.receiveBroadcastStream().listen( + (dynamic imageData) { + onAvailable(CameraImage._fromPlatformData(imageData)); + }, + ); + } + + /// Stop the stream of bytes from the camera. + /// + /// Throws a [CameraException] if byte streaming was not started or video + /// recording was started. + Future stopByteStream() async { + if (!value.isInitialized || _isDisposed) { + throw CameraException( + 'Uninitialized CameraController', + 'stopByteStream was called on uninitialized CameraController.', + ); + } + if (value.isRecordingVideo) { + throw CameraException( + 'A video recording is already started.', + 'stopByteStream was called while a video is being recorded.', + ); + } + if (!value.isStreamingBytes) { + throw CameraException( + 'No camera is streaming bytes', + 'stopByteStream was called when no camera is streaming bytes.', + ); + } + + try { + value = value.copyWith(isStreamingBytes: false); + await _channel.invokeMethod('stopByteStream'); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + + _byteStreamSubscription.cancel(); + _byteStreamSubscription = null; + } + + /// Start a video recording and save the file to [path]. + /// + /// A path can for example be obtained using + /// [path_provider](https://pub.dartlang.org/packages/path_provider). + /// + /// The file is written on the flight as the video is being recorded. + /// If a file already exists at the provided path an error will be thrown. + /// The file can be read as soon as [stopVideoRecording] returns. + /// + /// Throws a [CameraException] if the capture fails. + Future startVideoRecording(String filePath) async { + if (!value.isInitialized || _isDisposed) { + throw CameraException( + 'Uninitialized CameraController', + 'startVideoRecording was called on uninitialized CameraController', + ); + } + if (value.isRecordingVideo) { + throw CameraException( + 'A video recording is already started.', + 'startVideoRecording was called when a recording is already started.', + ); + } + if (value.isStreamingBytes) { + throw CameraException( + 'A camera has started streaming bytes.', + 'startVideoRecording was called while a camera was streaming bytes.', + ); + } + + try { + await _channel.invokeMethod( + 'startVideoRecording', + {'textureId': _textureId, 'filePath': filePath}, + ); + value = value.copyWith(isRecordingVideo: true); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + /// Stop recording. 
+ Future stopVideoRecording() async { + if (!value.isInitialized || _isDisposed) { + throw CameraException( + 'Uninitialized CameraController', + 'stopVideoRecording was called on uninitialized CameraController', + ); + } + if (!value.isRecordingVideo) { + throw CameraException( + 'No video is recording', + 'stopVideoRecording was called when no video is recording.', + ); + } + try { + value = value.copyWith(isRecordingVideo: false); + await _channel.invokeMethod( + 'stopVideoRecording', + {'textureId': _textureId}, + ); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } + + /// Releases the resources of this camera. + @override + Future dispose() async { + if (_isDisposed) { + return; + } + _isDisposed = true; + super.dispose(); + if (_creatingCompleter != null) { + await _creatingCompleter.future; + await _channel.invokeMethod( + 'dispose', + {'textureId': _textureId}, + ); + await _eventSubscription?.cancel(); + } + } +} diff --git a/packages/camera/lib/src/camera_image.dart b/packages/camera/lib/camera_image.dart similarity index 99% rename from packages/camera/lib/src/camera_image.dart rename to packages/camera/lib/camera_image.dart index 11a1fbea73d4..f1af6e086e2c 100644 --- a/packages/camera/lib/src/camera_image.dart +++ b/packages/camera/lib/camera_image.dart @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -part of camera; +part of 'camera.dart'; /// A single color plane of image data. /// diff --git a/packages/camera/lib/src/camera.dart b/packages/camera/lib/src/camera.dart deleted file mode 100644 index cc046e8b5f94..000000000000 --- a/packages/camera/lib/src/camera.dart +++ /dev/null @@ -1,454 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -part of camera; - -final MethodChannel _channel = const MethodChannel('plugins.flutter.io/camera') - ..invokeMethod('init'); - -enum CameraLensDirection { front, back, external } - -enum ResolutionPreset { low, medium, high } - -typedef void OnLatestImageAvailable(CameraImage image); - -/// Returns the resolution preset as a String. -String serializeResolutionPreset(ResolutionPreset resolutionPreset) { - switch (resolutionPreset) { - case ResolutionPreset.high: - return 'high'; - case ResolutionPreset.medium: - return 'medium'; - case ResolutionPreset.low: - return 'low'; - } - throw ArgumentError('Unknown ResolutionPreset value'); -} - -CameraLensDirection _parseCameraLensDirection(String string) { - switch (string) { - case 'front': - return CameraLensDirection.front; - case 'back': - return CameraLensDirection.back; - case 'external': - return CameraLensDirection.external; - } - throw ArgumentError('Unknown CameraLensDirection value'); -} - -/// Completes with a list of available cameras. -/// -/// May throw a [CameraException]. 
-Future> availableCameras() async { - try { - final List cameras = - await _channel.invokeMethod('availableCameras'); - return cameras.map((dynamic camera) { - return CameraDescription( - name: camera['name'], - lensDirection: _parseCameraLensDirection(camera['lensFacing']), - ); - }).toList(); - } on PlatformException catch (e) { - throw CameraException(e.code, e.message); - } -} - -class CameraDescription { - CameraDescription({this.name, this.lensDirection}); - - final String name; - final CameraLensDirection lensDirection; - - @override - bool operator ==(Object o) { - return o is CameraDescription && - o.name == name && - o.lensDirection == lensDirection; - } - - @override - int get hashCode { - return hashValues(name, lensDirection); - } - - @override - String toString() { - return '$runtimeType($name, $lensDirection)'; - } -} - -/// This is thrown when the plugin reports an error. -class CameraException implements Exception { - CameraException(this.code, this.description); - - String code; - String description; - - @override - String toString() => '$runtimeType($code, $description)'; -} - -// Build the UI texture view of the video data with textureId. -class CameraPreview extends StatelessWidget { - const CameraPreview(this.controller); - - final CameraController controller; - - @override - Widget build(BuildContext context) { - return controller.value.isInitialized - ? Texture(textureId: controller._textureId) - : Container(); - } -} - -/// The state of a [CameraController]. -class CameraValue { - const CameraValue({ - this.isInitialized, - this.errorDescription, - this.previewSize, - this.isRecordingVideo, - this.isTakingPicture, - this.isStreamingBytes, - }); - - const CameraValue.uninitialized() - : this( - isInitialized: false, - isRecordingVideo: false, - isTakingPicture: false, - isStreamingBytes: false); - - /// True after [CameraController.initialize] has completed successfully. - final bool isInitialized; - - /// True when a picture capture request has been sent but as not yet returned. - final bool isTakingPicture; - - /// True when the camera is recording (not the same as previewing). - final bool isRecordingVideo; - - /// True when bytes from the camera are being streamed. - final bool isStreamingBytes; - - final String errorDescription; - - /// The size of the preview in pixels. - /// - /// Is `null` until [isInitialized] is `true`. - final Size previewSize; - - /// Convenience getter for `previewSize.height / previewSize.width`. - /// - /// Can only be called when [initialize] is done. - double get aspectRatio => previewSize.height / previewSize.width; - - bool get hasError => errorDescription != null; - - CameraValue copyWith({ - bool isInitialized, - bool isRecordingVideo, - bool isTakingPicture, - bool isStreamingBytes, - String errorDescription, - Size previewSize, - }) { - return CameraValue( - isInitialized: isInitialized ?? this.isInitialized, - errorDescription: errorDescription, - previewSize: previewSize ?? this.previewSize, - isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo, - isTakingPicture: isTakingPicture ?? this.isTakingPicture, - isStreamingBytes: isStreamingBytes ?? this.isStreamingBytes, - ); - } - - @override - String toString() { - return '$runtimeType(' - 'isRecordingVideo: $isRecordingVideo, ' - 'isRecordingVideo: $isRecordingVideo, ' - 'isInitialized: $isInitialized, ' - 'errorDescription: $errorDescription, ' - 'previewSize: $previewSize, ' - 'isStreamingBytes: $isStreamingBytes)'; - } -} - -/// Controls a device camera. 
-/// -/// Use [availableCameras] to get a list of available cameras. -/// -/// Before using a [CameraController] a call to [initialize] must complete. -/// -/// To show the camera preview on the screen use a [CameraPreview] widget. -class CameraController extends ValueNotifier { - CameraController(this.description, this.resolutionPreset) - : super(const CameraValue.uninitialized()); - - final CameraDescription description; - final ResolutionPreset resolutionPreset; - - int _textureId; - bool _isDisposed = false; - StreamSubscription _eventSubscription; - StreamSubscription _byteStreamSubscription; - Completer _creatingCompleter; - - /// Initializes the camera on the device. - /// - /// Throws a [CameraException] if the initialization fails. - Future initialize() async { - if (_isDisposed) { - return Future.value(); - } - try { - _creatingCompleter = Completer(); - final Map reply = await _channel.invokeMethod( - 'initialize', - { - 'cameraName': description.name, - 'resolutionPreset': serializeResolutionPreset(resolutionPreset), - }, - ); - _textureId = reply['textureId']; - value = value.copyWith( - isInitialized: true, - previewSize: Size( - reply['previewWidth'].toDouble(), - reply['previewHeight'].toDouble(), - ), - ); - } on PlatformException catch (e) { - throw CameraException(e.code, e.message); - } - _eventSubscription = - EventChannel('flutter.io/cameraPlugin/cameraEvents$_textureId') - .receiveBroadcastStream() - .listen(_listener); - _creatingCompleter.complete(); - return _creatingCompleter.future; - } - - /// Listen to events from the native plugins. - /// - /// A "cameraClosing" event is sent when the camera is closed automatically by the system (for example when the app go to background). The plugin will try to reopen the camera automatically but any ongoing recording will end. - void _listener(dynamic event) { - final Map map = event; - if (_isDisposed) { - return; - } - - switch (map['eventType']) { - case 'error': - value = value.copyWith(errorDescription: event['errorDescription']); - break; - case 'cameraClosing': - value = value.copyWith(isRecordingVideo: false); - break; - } - } - - /// Captures an image and saves it to [path]. - /// - /// A path can for example be obtained using - /// [path_provider](https://pub.dartlang.org/packages/path_provider). - /// - /// If a file already exists at the provided path an error will be thrown. - /// The file can be read as this function returns. - /// - /// Throws a [CameraException] if the capture fails. - Future takePicture(String path) async { - if (!value.isInitialized || _isDisposed) { - throw CameraException( - 'Uninitialized CameraController.', - 'takePicture was called on uninitialized CameraController', - ); - } - if (value.isTakingPicture) { - throw CameraException( - 'Previous capture has not returned yet.', - 'takePicture was called before the previous capture returned.', - ); - } - try { - value = value.copyWith(isTakingPicture: true); - await _channel.invokeMethod( - 'takePicture', - {'textureId': _textureId, 'path': path}, - ); - value = value.copyWith(isTakingPicture: false); - } on PlatformException catch (e) { - value = value.copyWith(isTakingPicture: false); - throw CameraException(e.code, e.message); - } - } - - /// Start a streaming bytes from platform camera. - /// - /// The stream will always use the latest image and discard the others. - /// - /// [onAvailable] is a method that takes a [CameraImage] and - /// returns `void`. 
- /// - /// Throws a [CameraException] if byte streaming or video recording has - /// already started. - Future startByteStream(OnLatestImageAvailable onAvailable) async { - if (!value.isInitialized || _isDisposed) { - throw CameraException( - 'Uninitialized CameraController', - 'startByteStream was called on uninitialized CameraController.', - ); - } - if (value.isRecordingVideo) { - throw CameraException( - 'A video recording is already started.', - 'startByteStream was called while a video is being recorded.', - ); - } - if (value.isStreamingBytes) { - throw CameraException( - 'A camera has started streaming bytes.', - 'startByteStream was called while a camera was streaming bytes.', - ); - } - - try { - await _channel.invokeMethod('startByteStream'); - value = value.copyWith(isStreamingBytes: true); - } on PlatformException catch (e) { - throw CameraException(e.code, e.message); - } - const EventChannel cameraEventChannel = - EventChannel('plugins.flutter.io/camera/bytes'); - _byteStreamSubscription = - cameraEventChannel.receiveBroadcastStream().listen( - (dynamic imageData) { - onAvailable(CameraImage._fromPlatformData(imageData)); - }, - ); - } - - /// Stop the stream of bytes from the camera. - /// - /// Throws a [CameraException] if byte streaming was not started or video - /// recording was started. - Future stopByteStream() async { - if (!value.isInitialized || _isDisposed) { - throw CameraException( - 'Uninitialized CameraController', - 'stopByteStream was called on uninitialized CameraController.', - ); - } - if (value.isRecordingVideo) { - throw CameraException( - 'A video recording is already started.', - 'stopByteStream was called while a video is being recorded.', - ); - } - if (!value.isStreamingBytes) { - throw CameraException( - 'No camera is streaming bytes', - 'stopByteStream was called when no camera is streaming bytes.', - ); - } - - try { - value = value.copyWith(isStreamingBytes: false); - await _channel.invokeMethod('stopByteStream'); - } on PlatformException catch (e) { - throw CameraException(e.code, e.message); - } - - _byteStreamSubscription.cancel(); - _byteStreamSubscription = null; - } - - /// Start a video recording and save the file to [path]. - /// - /// A path can for example be obtained using - /// [path_provider](https://pub.dartlang.org/packages/path_provider). - /// - /// The file is written on the flight as the video is being recorded. - /// If a file already exists at the provided path an error will be thrown. - /// The file can be read as soon as [stopVideoRecording] returns. - /// - /// Throws a [CameraException] if the capture fails. - Future startVideoRecording(String filePath) async { - if (!value.isInitialized || _isDisposed) { - throw CameraException( - 'Uninitialized CameraController', - 'startVideoRecording was called on uninitialized CameraController', - ); - } - if (value.isRecordingVideo) { - throw CameraException( - 'A video recording is already started.', - 'startVideoRecording was called when a recording is already started.', - ); - } - if (value.isStreamingBytes) { - throw CameraException( - 'A camera has started streaming bytes.', - 'startVideoRecording was called while a camera was streaming bytes.', - ); - } - - try { - await _channel.invokeMethod( - 'startVideoRecording', - {'textureId': _textureId, 'filePath': filePath}, - ); - value = value.copyWith(isRecordingVideo: true); - } on PlatformException catch (e) { - throw CameraException(e.code, e.message); - } - } - - /// Stop recording. 
- Future stopVideoRecording() async { - if (!value.isInitialized || _isDisposed) { - throw CameraException( - 'Uninitialized CameraController', - 'stopVideoRecording was called on uninitialized CameraController', - ); - } - if (!value.isRecordingVideo) { - throw CameraException( - 'No video is recording', - 'stopVideoRecording was called when no video is recording.', - ); - } - try { - value = value.copyWith(isRecordingVideo: false); - await _channel.invokeMethod( - 'stopVideoRecording', - {'textureId': _textureId}, - ); - } on PlatformException catch (e) { - throw CameraException(e.code, e.message); - } - } - - /// Releases the resources of this camera. - @override - Future dispose() async { - if (_isDisposed) { - return; - } - _isDisposed = true; - super.dispose(); - if (_creatingCompleter != null) { - await _creatingCompleter.future; - await _channel.invokeMethod( - 'dispose', - {'textureId': _textureId}, - ); - await _eventSubscription?.cancel(); - } - } -} From 615562056e0cebc9eb022b98344cc7f5cce187dd Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 19 Dec 2018 15:48:37 -0800 Subject: [PATCH 28/34] bytestream -> imagestream --- .../flutter/plugins/camera/CameraPlugin.java | 20 +-- packages/camera/ios/Classes/CameraPlugin.m | 50 +++---- packages/camera/lib/camera.dart | 136 +++++++++--------- packages/camera/lib/camera_image.dart | 4 +- 4 files changed, 106 insertions(+), 104 deletions(-) diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index dc3b7dd1b882..d977e06980e0 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -213,17 +213,17 @@ public void onMethodCall(MethodCall call, final Result result) { camera.stopVideoRecording(result); break; } - case "startByteStream": + case "startImageStream": { try { - camera.startPreviewWithByteStream(); + camera.startPreviewWithImageStream(); result.success(null); } catch (CameraAccessException e) { result.error("CameraAccess", e.getMessage(), null); } break; } - case "stopByteStream": + case "stopImageStream": { try { camera.startPreview(); @@ -754,7 +754,7 @@ public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession null); } - private void startPreviewWithByteStream() throws CameraAccessException { + private void startPreviewWithImageStream() throws CameraAccessException { closeCaptureSession(); SurfaceTexture surfaceTexture = textureEntry.surfaceTexture(); @@ -793,23 +793,23 @@ public void onConfigured(@NonNull CameraCaptureSession session) { @Override public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { - sendErrorEvent("Failed to configure the camera for streaming bytes."); + sendErrorEvent("Failed to configure the camera for streaming images."); } }, null); - registerByteStreamEventChannel(); + registerImageStreamEventChannel(); } - private void registerByteStreamEventChannel() { + private void registerImageStreamEventChannel() { final EventChannel cameraChannel = - new EventChannel(registrar.messenger(), "plugins.flutter.io/camera/bytes"); + new EventChannel(registrar.messenger(), "plugins.flutter.io/camera/imageStream"); cameraChannel.setStreamHandler( new EventChannel.StreamHandler() { @Override public void onListen(Object o, EventChannel.EventSink eventSink) { - setByteStreamImageAvailableListener(eventSink); + 
setImageStreamImageAvailableListener(eventSink); } @Override @@ -819,7 +819,7 @@ public void onCancel(Object o) { }); } - private void setByteStreamImageAvailableListener(final EventChannel.EventSink eventSink) { + private void setImageStreamImageAvailableListener(final EventChannel.EventSink eventSink) { imageStreamReader.setOnImageAvailableListener( new ImageReader.OnImageAvailableListener() { @Override diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index 188e08376956..83e9a9b243f4 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -22,11 +22,11 @@ @interface FLTSavePhotoDelegate : NSObject - initWithPath:(NSString *)filename result:(FlutterResult)result; @end -@interface FLTByteStreamHandler : NSObject +@interface FLTImageStreamHandler : NSObject @property FlutterEventSink eventSink; @end -@implementation FLTByteStreamHandler +@implementation FLTImageStreamHandler - (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments { _eventSink = nil; @@ -85,7 +85,7 @@ @interface FLTCam : NSObject *)messenger; -- (void)stopByteStream; +- (void)startImageStreamWithMessenger:(NSObject *)messenger; +- (void)stopImageStream; - (void)captureToFile:(NSString *)filename result:(FlutterResult)result; @end @@ -220,8 +220,8 @@ - (void)captureOutput:(AVCaptureOutput *)output }); return; } - if (_isStreamingBytes) { - if (_byteStreamHandler.eventSink) { + if (_isStreamingImages) { + if (_imageStreamHandler.eventSink) { CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); @@ -255,7 +255,7 @@ - (void)captureOutput:(AVCaptureOutput *)output imageBuffer[@"format"] = @(videoFormat); imageBuffer[@"planes"] = planes; - _byteStreamHandler.eventSink(imageBuffer); + _imageStreamHandler.eventSink(imageBuffer); CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); } @@ -448,29 +448,29 @@ - (void)stopVideoRecordingWithResult:(FlutterResult)result { } } -- (void)startByteStreamWithMessenger:(NSObject *)messenger { - if (!_isStreamingBytes) { +- (void)startImageStreamWithMessenger:(NSObject *)messenger { + if (!_isStreamingImages) { FlutterEventChannel *eventChannel = - [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/bytes" + [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream" binaryMessenger:messenger]; - _byteStreamHandler = [[FLTByteStreamHandler alloc] init]; - [eventChannel setStreamHandler:_byteStreamHandler]; + _imageStreamHandler = [[FLTImageStreamHandler alloc] init]; + [eventChannel setStreamHandler:_imageStreamHandler]; - _isStreamingBytes = YES; + _isStreamingImages = YES; } else { _eventSink( - @{@"event" : @"error", @"errorDescription" : @"Bytes from camera are already streaming!"}); + @{@"event" : @"error", @"errorDescription" : @"Images from camera are already streaming!"}); } } -- (void)stopByteStream { - if (_isStreamingBytes) { - _isStreamingBytes = NO; - _byteStreamHandler = nil; +- (void)stopImageStream { + if (_isStreamingImages) { + _isStreamingImages = NO; + _imageStreamHandler = nil; } else { _eventSink( - @{@"event" : @"error", @"errorDescription" : @"Bytes from camera are not streaming!"}); + @{@"event" : @"error", @"errorDescription" : @"Images from camera are not streaming!"}); } } @@ -648,11 +648,11 @@ - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result }); [cam start]; } - } 
else if ([@"startByteStream" isEqualToString:call.method]) { - [_camera startByteStreamWithMessenger:_messenger]; + } else if ([@"startImageStream" isEqualToString:call.method]) { + [_camera startImageStreamWithMessenger:_messenger]; result(nil); - } else if ([@"stopByteStream" isEqualToString:call.method]) { - [_camera stopByteStream]; + } else if ([@"stopImageStream" isEqualToString:call.method]) { + [_camera stopImageStream]; result(nil); } else { NSDictionary *argsMap = call.arguments; diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index d582bde194ab..e32d2ada074e 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -17,7 +17,7 @@ enum CameraLensDirection { front, back, external } enum ResolutionPreset { low, medium, high } -typedef void OnLatestImageAvailable(CameraImage image); +typedef onLatestImageAvailable = Function(CameraImage image); /// Returns the resolution preset as a String. String serializeResolutionPreset(ResolutionPreset resolutionPreset) { @@ -50,7 +50,7 @@ CameraLensDirection _parseCameraLensDirection(String string) { Future> availableCameras() async { try { final List cameras = - await _channel.invokeMethod('availableCameras'); + await _channel.invokeMethod('availableCameras'); return cameras.map((dynamic camera) { return CameraDescription( name: camera['name'], @@ -119,15 +119,15 @@ class CameraValue { this.previewSize, this.isRecordingVideo, this.isTakingPicture, - this.isStreamingBytes, + this.isStreamingImages, }); const CameraValue.uninitialized() : this( - isInitialized: false, - isRecordingVideo: false, - isTakingPicture: false, - isStreamingBytes: false); + isInitialized: false, + isRecordingVideo: false, + isTakingPicture: false, + isStreamingImages: false); /// True after [CameraController.initialize] has completed successfully. final bool isInitialized; @@ -138,8 +138,8 @@ class CameraValue { /// True when the camera is recording (not the same as previewing). final bool isRecordingVideo; - /// True when bytes from the camera are being streamed. - final bool isStreamingBytes; + /// True when images from the camera are being streamed. + final bool isStreamingImages; final String errorDescription; @@ -159,7 +159,7 @@ class CameraValue { bool isInitialized, bool isRecordingVideo, bool isTakingPicture, - bool isStreamingBytes, + bool isStreamingImages, String errorDescription, Size previewSize, }) { @@ -169,7 +169,7 @@ class CameraValue { previewSize: previewSize ?? this.previewSize, isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo, isTakingPicture: isTakingPicture ?? this.isTakingPicture, - isStreamingBytes: isStreamingBytes ?? this.isStreamingBytes, + isStreamingImages: isStreamingImages ?? this.isStreamingImages, ); } @@ -181,7 +181,7 @@ class CameraValue { 'isInitialized: $isInitialized, ' 'errorDescription: $errorDescription, ' 'previewSize: $previewSize, ' - 'isStreamingBytes: $isStreamingBytes)'; + 'isStreamingImages: $isStreamingImages)'; } } @@ -202,7 +202,7 @@ class CameraController extends ValueNotifier { int _textureId; bool _isDisposed = false; StreamSubscription _eventSubscription; - StreamSubscription _byteStreamSubscription; + StreamSubscription _imageStreamSubscription; Completer _creatingCompleter; /// Initializes the camera on the device. 
@@ -213,29 +213,29 @@ class CameraController extends ValueNotifier { return Future.value(); } try { - _creatingCompleter = Completer(); - final Map reply = await _channel.invokeMethod( - 'initialize', - { - 'cameraName': description.name, - 'resolutionPreset': serializeResolutionPreset(resolutionPreset), - }, - ); - _textureId = reply['textureId']; - value = value.copyWith( - isInitialized: true, - previewSize: Size( - reply['previewWidth'].toDouble(), - reply['previewHeight'].toDouble(), - ), - ); + _creatingCompleter = Completer(); + final Map reply = await _channel.invokeMethod( + 'initialize', + { + 'cameraName': description.name, + 'resolutionPreset': serializeResolutionPreset(resolutionPreset), + }, + ); + _textureId = reply['textureId']; + value = value.copyWith( + isInitialized: true, + previewSize: Size( + reply['previewWidth'].toDouble(), + reply['previewHeight'].toDouble(), + ), + ); } on PlatformException catch (e) { - throw CameraException(e.code, e.message); + throw CameraException(e.code, e.message); } _eventSubscription = - EventChannel('flutter.io/cameraPlugin/cameraEvents$_textureId') - .receiveBroadcastStream() - .listen(_listener); + EventChannel('flutter.io/cameraPlugin/cameraEvents$_textureId') + .receiveBroadcastStream() + .listen(_listener); _creatingCompleter.complete(); return _creatingCompleter.future; } @@ -294,84 +294,86 @@ class CameraController extends ValueNotifier { } } - /// Start a streaming bytes from platform camera. + /// Start streaming images from platform camera. /// - /// The stream will always use the latest image and discard the others. + /// Settings for capturing images on iOS and Android is set to always use the + /// latest image available and drop all others. /// - /// [onAvailable] is a method that takes a [CameraImage] and - /// returns `void`. + /// When running continuously, this function runs best with + /// [ResolutionPreset.low]. Running on [ResolutionPreset.high] can have + /// significant frame rate drops depending on the device. /// - /// Throws a [CameraException] if byte streaming or video recording has + /// Throws a [CameraException] if image streaming or video recording has /// already started. 
- Future startByteStream(OnLatestImageAvailable onAvailable) async { + Future startImageStream(onLatestImageAvailable onAvailable) async { if (!value.isInitialized || _isDisposed) { throw CameraException( 'Uninitialized CameraController', - 'startByteStream was called on uninitialized CameraController.', + 'startImageStream was called on uninitialized CameraController.', ); } if (value.isRecordingVideo) { throw CameraException( 'A video recording is already started.', - 'startByteStream was called while a video is being recorded.', + 'startImageStream was called while a video is being recorded.', ); } - if (value.isStreamingBytes) { + if (value.isStreamingImages) { throw CameraException( - 'A camera has started streaming bytes.', - 'startByteStream was called while a camera was streaming bytes.', + 'A camera has started streaming images.', + 'startImageStream was called while a camera was streaming images.', ); } try { - await _channel.invokeMethod('startByteStream'); - value = value.copyWith(isStreamingBytes: true); + await _channel.invokeMethod('startImageStream'); + value = value.copyWith(isStreamingImages: true); } on PlatformException catch (e) { throw CameraException(e.code, e.message); } const EventChannel cameraEventChannel = - EventChannel('plugins.flutter.io/camera/bytes'); - _byteStreamSubscription = + EventChannel('plugins.flutter.io/camera/imageStream'); + _imageStreamSubscription = cameraEventChannel.receiveBroadcastStream().listen( - (dynamic imageData) { - onAvailable(CameraImage._fromPlatformData(imageData)); - }, - ); + (dynamic imageData) { + onAvailable(CameraImage._fromPlatformData(imageData)); + }, + ); } - /// Stop the stream of bytes from the camera. + /// Stop streaming images from platform camera. /// - /// Throws a [CameraException] if byte streaming was not started or video + /// Throws a [CameraException] if image streaming was not started or video /// recording was started. - Future stopByteStream() async { + Future stopImageStream() async { if (!value.isInitialized || _isDisposed) { throw CameraException( 'Uninitialized CameraController', - 'stopByteStream was called on uninitialized CameraController.', + 'stopImageStream was called on uninitialized CameraController.', ); } if (value.isRecordingVideo) { throw CameraException( 'A video recording is already started.', - 'stopByteStream was called while a video is being recorded.', + 'stopImageStream was called while a video is being recorded.', ); } - if (!value.isStreamingBytes) { + if (!value.isStreamingImages) { throw CameraException( - 'No camera is streaming bytes', - 'stopByteStream was called when no camera is streaming bytes.', + 'No camera is streaming images', + 'stopImageStream was called when no camera is streaming images.', ); } try { - value = value.copyWith(isStreamingBytes: false); - await _channel.invokeMethod('stopByteStream'); + value = value.copyWith(isStreamingImages: false); + await _channel.invokeMethod('stopImageStream'); } on PlatformException catch (e) { throw CameraException(e.code, e.message); } - _byteStreamSubscription.cancel(); - _byteStreamSubscription = null; + _imageStreamSubscription.cancel(); + _imageStreamSubscription = null; } /// Start a video recording and save the file to [path]. 
@@ -397,10 +399,10 @@ class CameraController extends ValueNotifier { 'startVideoRecording was called when a recording is already started.', ); } - if (value.isStreamingBytes) { + if (value.isStreamingImages) { throw CameraException( - 'A camera has started streaming bytes.', - 'startVideoRecording was called while a camera was streaming bytes.', + 'A camera has started streaming images.', + 'startVideoRecording was called while a camera was streaming images.', ); } diff --git a/packages/camera/lib/camera_image.dart b/packages/camera/lib/camera_image.dart index f1af6e086e2c..2dd665ddb400 100644 --- a/packages/camera/lib/camera_image.dart +++ b/packages/camera/lib/camera_image.dart @@ -9,7 +9,7 @@ part of 'camera.dart'; /// The number and meaning of the planes in an image are determined by the /// format of the Image. class Plane { - Plane._fromPlatformData(dynamic data) + Plane._fromPlatformData(Map data) : bytes = data['bytes'], bytesPerPixel = data['bytesPerPixel'], bytesPerRow = data['bytesPerRow'], @@ -92,7 +92,7 @@ ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) { /// Although not all image formats are planar on iOS, we treat 1-dimensional /// images as single planar images. class CameraImage { - CameraImage._fromPlatformData(dynamic data) + CameraImage._fromPlatformData(Map data) : format = ImageFormat._fromPlatformData(data['format']), height = data['height'], width = data['width'], From 5898b4cb21e4abe140ad54e14d1d047b76f93afa Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 19 Dec 2018 16:14:20 -0800 Subject: [PATCH 29/34] Comments and names --- packages/camera/CHANGELOG.md | 3 ++- packages/camera/README.md | 2 ++ .../java/io/flutter/plugins/camera/CameraPlugin.java | 4 ++-- packages/camera/lib/camera.dart | 9 +++++---- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/packages/camera/CHANGELOG.md b/packages/camera/CHANGELOG.md index c127c90a38bb..5de0b07ee1cd 100644 --- a/packages/camera/CHANGELOG.md +++ b/packages/camera/CHANGELOG.md @@ -1,6 +1,7 @@ ## 0.2.7 -* Add byte streaming capability for the camera. +* Add access to the image stream from Dart. +* Use `cameraController.startImageStream(listener)` to process the images. ## 0.2.6 diff --git a/packages/camera/README.md b/packages/camera/README.md index 58d30bdfb894..e0d66ef4cb34 100644 --- a/packages/camera/README.md +++ b/packages/camera/README.md @@ -8,6 +8,8 @@ A Flutter plugin for iOS and Android allowing access to the device cameras. * Display live camera preview in a widget. * Snapshots can be captured and saved to a file. +* Record video. +* Add access to the image stream from Dart. 
## Installation diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index d977e06980e0..10288ebda095 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -802,10 +802,10 @@ public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession } private void registerImageStreamEventChannel() { - final EventChannel cameraChannel = + final EventChannel imageStreamChannel = new EventChannel(registrar.messenger(), "plugins.flutter.io/camera/imageStream"); - cameraChannel.setStreamHandler( + imageStreamChannel.setStreamHandler( new EventChannel.StreamHandler() { @Override public void onListen(Object o, EventChannel.EventSink eventSink) { diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index e32d2ada074e..151f732d3918 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -297,11 +297,12 @@ class CameraController extends ValueNotifier { /// Start streaming images from platform camera. /// /// Settings for capturing images on iOS and Android is set to always use the - /// latest image available and drop all others. + /// latest image available from the camera and will drop all other images. /// - /// When running continuously, this function runs best with - /// [ResolutionPreset.low]. Running on [ResolutionPreset.high] can have - /// significant frame rate drops depending on the device. + /// When running continuously with [CameraPreview] widget, this function runs + /// best with [ResolutionPreset.low]. Running on [ResolutionPreset.high] can + /// have significant frame rate drops for [CameraPreview] on lower end + /// devices. /// /// Throws a [CameraException] if image streaming or video recording has /// already started. From 7617bb934e8740a17a0ae2e685de95a25d611a05 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 19 Dec 2018 16:21:37 -0800 Subject: [PATCH 30/34] Formatting --- packages/camera/ios/Classes/CameraPlugin.m | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m index 83e9a9b243f4..79e6e343ed91 100644 --- a/packages/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/ios/Classes/CameraPlugin.m @@ -348,8 +348,8 @@ - (CVPixelBufferRef)copyPixelBuffer { return [self convertYUVImageToBGRA:pixelBuffer]; } -// Since video format was changed to kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange we have to convert -// image to a usable format for flutter textures. Which is kCVPixelFormatType_32BGRA. +// Since video format was changed to kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange we have to +// convert image to a usable format for flutter textures. Which is kCVPixelFormatType_32BGRA. 
- (CVPixelBufferRef)convertYUVImageToBGRA:(CVPixelBufferRef)pixelBuffer { CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); From f18db98928ef5224fc8b0070f4bd325de98e36c0 Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 19 Dec 2018 16:45:27 -0800 Subject: [PATCH 31/34] Added resolution and fps todo --- packages/camera/lib/camera.dart | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index 151f732d3918..62b6ec706560 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -306,6 +306,8 @@ class CameraController extends ValueNotifier { /// /// Throws a [CameraException] if image streaming or video recording has /// already started. + /// + /// TODO(bmparr): Add settings for resolution and fps. Future startImageStream(onLatestImageAvailable onAvailable) async { if (!value.isInitialized || _isDisposed) { throw CameraException( From 0bf466a9826adb0ba163dd092ec0bd56eb88c78b Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 19 Dec 2018 16:47:59 -0800 Subject: [PATCH 32/34] Unmodify file --- .../ios/Runner.xcodeproj/project.pbxproj | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/packages/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/example/ios/Runner.xcodeproj/project.pbxproj index f3ac434ae2e7..5a54057fee45 100644 --- a/packages/camera/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/example/ios/Runner.xcodeproj/project.pbxproj @@ -161,6 +161,7 @@ 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, FE224661708E6DA2A0F8B952 /* [CP] Embed Pods Frameworks */, + EACF0929FF12B6CC70C2D6BE /* [CP] Copy Pods Resources */, ); buildRules = ( ); @@ -182,7 +183,7 @@ TargetAttributes = { 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; - DevelopmentTeam = S8QB4VV633; + DevelopmentTeam = EQHXZ8M8AV; }; }; }; @@ -268,6 +269,21 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; + EACF0929FF12B6CC70C2D6BE /* [CP] Copy Pods Resources */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "[CP] Copy Pods Resources"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n"; + showEnvVarsInLog = 0; + }; FE224661708E6DA2A0F8B952 /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; @@ -275,7 +291,7 @@ ); inputPaths = ( "${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh", - "${PODS_ROOT}/../.symlinks/flutter/ios/Flutter.framework", + "${PODS_ROOT}/../../../../../../flutter/bin/cache/artifacts/engine/ios-release/Flutter.framework", ); name = "[CP] Embed Pods Frameworks"; outputPaths = ( @@ -417,7 +433,7 @@ buildSettings = { ARCHS = arm64; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = S8QB4VV633; + DEVELOPMENT_TEAM = EQHXZ8M8AV; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -440,7 +456,7 @@ buildSettings = { ARCHS = arm64; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = S8QB4VV633; + DEVELOPMENT_TEAM = EQHXZ8M8AV; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", From a53222b8af7a85bcf11efdcc5ec134e79842fa9a Mon Sep 17 00:00:00 2001 From: Maurice Parrish 
Date: Wed, 19 Dec 2018 16:54:11 -0800 Subject: [PATCH 33/34] Empty commit to rerun tests From fe965d1aeb545fb676224b073293e5ac36df027e Mon Sep 17 00:00:00 2001 From: Maurice Parrish Date: Wed, 19 Dec 2018 17:30:01 -0800 Subject: [PATCH 34/34] Remove TODO from documentation --- packages/camera/lib/camera.dart | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart index 62b6ec706560..19028891ad84 100644 --- a/packages/camera/lib/camera.dart +++ b/packages/camera/lib/camera.dart @@ -306,8 +306,7 @@ class CameraController extends ValueNotifier { /// /// Throws a [CameraException] if image streaming or video recording has /// already started. - /// - /// TODO(bmparr): Add settings for resolution and fps. + // TODO(bmparr): Add settings for resolution and fps. Future startImageStream(onLatestImageAvailable onAvailable) async { if (!value.isInitialized || _isDisposed) { throw CameraException(
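
Taken together, the patches above leave the Dart API as `startImageStream`/`stopImageStream`, delivering `CameraImage` objects over the `plugins.flutter.io/camera/imageStream` event channel. A closing sketch of a listener that walks the per-plane data — the function name, the presence of a `planes` list on `CameraImage`, and the byte counting are assumptions for illustration, not code from these patches:

```dart
void onLatestFrame(CameraImage image) {
  // Each Plane mirrors the map sent over the event channel: raw bytes plus
  // bytesPerRow/bytesPerPixel strides and its own width/height
  // (see camera_image.dart above).
  int totalBytes = 0;
  for (Plane plane in image.planes) {
    totalBytes += plane.bytes.length;
  }
  print('Frame ${image.width}x${image.height}: '
      '${image.planes.length} plane(s), $totalBytes bytes');
}
```

Passing `onLatestFrame` to `controller.startImageStream(onLatestFrame)` works because the `onLatestImageAvailable` typedef introduced earlier is simply `Function(CameraImage image)`.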