diff --git a/packages/camera/CHANGELOG.md b/packages/camera/CHANGELOG.md
index 68b0afd261e9..314ed79ee969 100644
--- a/packages/camera/CHANGELOG.md
+++ b/packages/camera/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 0.2.8
+
+* Add access to the image stream from Dart.
+* Use `cameraController.startImageStream(listener)` to process the images.
+
 ## 0.2.7
 
 * Fix issue with crash when the physical device's orientation is unknown.
diff --git a/packages/camera/README.md b/packages/camera/README.md
index 58d30bdfb894..e0d66ef4cb34 100644
--- a/packages/camera/README.md
+++ b/packages/camera/README.md
@@ -8,6 +8,8 @@ A Flutter plugin for iOS and Android allowing access to the device cameras.
 
 * Display live camera preview in a widget.
 * Snapshots can be captured and saved to a file.
+* Record video.
+* Access the image stream from Dart.
 
 ## Installation
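As a quick illustration of the API added above (a sketch, not part of the patch), the snippet below streams frames for a few seconds. It assumes the plugin's existing `availableCameras()` and `CameraController` setup:

```dart
import 'package:camera/camera.dart';

/// Streams frames from the first available camera for a few seconds.
Future<void> streamForAWhile() async {
  final List<CameraDescription> cameras = await availableCameras();
  final CameraController controller =
      CameraController(cameras.first, ResolutionPreset.low);
  await controller.initialize();

  int frameCount = 0;
  await controller.startImageStream((CameraImage image) {
    // Runs for every frame delivered by the platform; keep this callback cheap.
    frameCount++;
  });

  await Future<void>.delayed(const Duration(seconds: 5));
  await controller.stopImageStream();
  print('Received $frameCount frames.');
  await controller.dispose();
}
```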
diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java
index fd2bf44699d5..709c2f379bb0 100644
--- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java
+++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java
@@ -213,6 +213,26 @@ public void onMethodCall(MethodCall call, final Result result) {
           camera.stopVideoRecording(result);
           break;
         }
+      case "startImageStream":
+        {
+          try {
+            camera.startPreviewWithImageStream();
+            result.success(null);
+          } catch (CameraAccessException e) {
+            result.error("CameraAccess", e.getMessage(), null);
+          }
+          break;
+        }
+      case "stopImageStream":
+        {
+          try {
+            camera.startPreview();
+            result.success(null);
+          } catch (CameraAccessException e) {
+            result.error("CameraAccess", e.getMessage(), null);
+          }
+          break;
+        }
       case "dispose":
         {
           if (camera != null) {
@@ -258,7 +278,8 @@ private class Camera {
     private CameraDevice cameraDevice;
     private CameraCaptureSession cameraCaptureSession;
     private EventChannel.EventSink eventSink;
-    private ImageReader imageReader;
+    private ImageReader pictureImageReader;
+    private ImageReader imageStreamReader;
     private int sensorOrientation;
     private boolean isFrontFacing;
     private String cameraName;
@@ -458,9 +479,15 @@ private void open(@Nullable final Result result) {
        if (result != null) result.error("cameraPermission", "Camera permission not granted", null);
      } else {
        try {
-          imageReader =
+          pictureImageReader =
              ImageReader.newInstance(
                  captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 2);
+
+          // Used to stream image byte data to the Dart side.
+          imageStreamReader =
+              ImageReader.newInstance(
+                  previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
+
          cameraManager.openCamera(
              cameraName,
              new CameraDevice.StateCallback() {
@@ -553,7 +580,7 @@ private void takePicture(String filePath, @NonNull final Result result) {
        return;
      }
 
-      imageReader.setOnImageAvailableListener(
+      pictureImageReader.setOnImageAvailableListener(
          new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
@@ -571,7 +598,7 @@ public void onImageAvailable(ImageReader reader) {
      try {
        final CaptureRequest.Builder captureBuilder =
            cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
-        captureBuilder.addTarget(imageReader.getSurface());
+        captureBuilder.addTarget(pictureImageReader.getSurface());
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getMediaOrientation());
 
        cameraCaptureSession.capture(
@@ -697,7 +724,7 @@ private void startPreview() throws CameraAccessException {
      surfaces.add(previewSurface);
      captureRequestBuilder.addTarget(previewSurface);
 
-      surfaces.add(imageReader.getSurface());
+      surfaces.add(pictureImageReader.getSurface());
 
      cameraDevice.createCaptureSession(
          surfaces,
@@ -727,6 +754,107 @@ public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession
          null);
    }
 
+    private void startPreviewWithImageStream() throws CameraAccessException {
+      closeCaptureSession();
+
+      SurfaceTexture surfaceTexture = textureEntry.surfaceTexture();
+      surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
+
+      captureRequestBuilder =
+          cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+
+      List<Surface> surfaces = new ArrayList<>();
+
+      Surface previewSurface = new Surface(surfaceTexture);
+      surfaces.add(previewSurface);
+      captureRequestBuilder.addTarget(previewSurface);
+
+      surfaces.add(imageStreamReader.getSurface());
+      captureRequestBuilder.addTarget(imageStreamReader.getSurface());
+
+      cameraDevice.createCaptureSession(
+          surfaces,
+          new CameraCaptureSession.StateCallback() {
+            @Override
+            public void onConfigured(@NonNull CameraCaptureSession session) {
+              if (cameraDevice == null) {
+                sendErrorEvent("The camera was closed during configuration.");
+                return;
+              }
+              try {
+                cameraCaptureSession = session;
+                captureRequestBuilder.set(
+                    CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
+                cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, null);
+              } catch (CameraAccessException e) {
+                sendErrorEvent(e.getMessage());
+              }
+            }
+
+            @Override
+            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
+              sendErrorEvent("Failed to configure the camera for streaming images.");
+            }
+          },
+          null);
+
+      registerImageStreamEventChannel();
+    }
+
+    private void registerImageStreamEventChannel() {
+      final EventChannel imageStreamChannel =
+          new EventChannel(registrar.messenger(), "plugins.flutter.io/camera/imageStream");
+
+      imageStreamChannel.setStreamHandler(
+          new EventChannel.StreamHandler() {
+            @Override
+            public void onListen(Object o, EventChannel.EventSink eventSink) {
+              setImageStreamImageAvailableListener(eventSink);
+            }
+
+            @Override
+            public void onCancel(Object o) {
+              imageStreamReader.setOnImageAvailableListener(null, null);
+            }
+          });
+    }
+
+    private void setImageStreamImageAvailableListener(final EventChannel.EventSink eventSink) {
+      imageStreamReader.setOnImageAvailableListener(
+          new ImageReader.OnImageAvailableListener() {
+            @Override
+            public void onImageAvailable(final ImageReader reader) {
+              Image img = reader.acquireLatestImage();
+              if (img == null) return;
+
+              List<Map<String, Object>> planes = new ArrayList<>();
+              for (Image.Plane plane : img.getPlanes()) {
+                ByteBuffer buffer = plane.getBuffer();
+
+                byte[] bytes = new byte[buffer.remaining()];
+                buffer.get(bytes, 0, bytes.length);
+
+                Map<String, Object> planeBuffer = new HashMap<>();
+                planeBuffer.put("bytesPerRow", plane.getRowStride());
+                planeBuffer.put("bytesPerPixel", plane.getPixelStride());
+                planeBuffer.put("bytes", bytes);
+
+                planes.add(planeBuffer);
+              }
+
+              Map<String, Object> imageBuffer = new HashMap<>();
+              imageBuffer.put("width", img.getWidth());
+              imageBuffer.put("height", img.getHeight());
+              imageBuffer.put("format", img.getFormat());
+              imageBuffer.put("planes", planes);
+
+              eventSink.success(imageBuffer);
+              img.close();
+            }
+          },
+          null);
+    }
+
    private void sendErrorEvent(String errorDescription) {
      if (eventSink != null) {
        Map<String, String> event = new HashMap<>();
@@ -750,9 +878,13 @@ private void close() {
        cameraDevice.close();
        cameraDevice = null;
      }
-      if (imageReader != null) {
-        imageReader.close();
-        imageReader = null;
+      if (pictureImageReader != null) {
+        pictureImageReader.close();
+        pictureImageReader = null;
+      }
+      if (imageStreamReader != null) {
+        imageStreamReader.close();
+        imageStreamReader = null;
      }
      if (mediaRecorder != null) {
        mediaRecorder.reset();
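On Android, each frame arrives as the YUV_420_888 plane maps built above, so a Dart-side listener has to respect `bytesPerRow` when indexing into a plane. A minimal sketch of such a consumer (illustrative only; `averageLuminance` is not part of the plugin):

```dart
import 'package:camera/camera.dart';

/// Rough average brightness of a frame, sampled from the first (luma) plane.
///
/// Rows can be padded, so pixels are addressed through `bytesPerRow` instead
/// of assuming the plane is exactly `width * height` bytes long.
double averageLuminance(CameraImage image) {
  final Plane yPlane = image.planes[0];
  final int rowStride = yPlane.bytesPerRow;
  int total = 0;
  int count = 0;
  for (int row = 0; row < image.height; row++) {
    for (int col = 0; col < image.width; col++) {
      final int index = row * rowStride + col;
      if (index >= yPlane.bytes.length) break;
      total += yPlane.bytes[index];
      count++;
    }
  }
  return count == 0 ? 0.0 : total / count;
}
```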
diff --git a/packages/camera/ios/Classes/CameraPlugin.m b/packages/camera/ios/Classes/CameraPlugin.m
index 42cc2e42894c..79e6e343ed91 100644
--- a/packages/camera/ios/Classes/CameraPlugin.m
+++ b/packages/camera/ios/Classes/CameraPlugin.m
@@ -1,5 +1,6 @@
 #import "CameraPlugin.h"
 #import <AVFoundation/AVFoundation.h>
+#import <Accelerate/Accelerate.h>
 #import <libkern/OSAtomic.h>
 
 @interface NSError (FlutterError)
@@ -21,6 +22,24 @@ @interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
 - initWithPath:(NSString *)filename result:(FlutterResult)result;
 @end
 
+@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
+@property FlutterEventSink eventSink;
+@end
+
+@implementation FLTImageStreamHandler
+
+- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
+  _eventSink = nil;
+  return nil;
+}
+
+- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
+                                       eventSink:(nonnull FlutterEventSink)events {
+  _eventSink = events;
+  return nil;
+}
+@end
+
 @implementation FLTSavePhotoDelegate {
   /// Used to keep the delegate alive until didFinishProcessingPhotoSampleBuffer.
   FLTSavePhotoDelegate *selfReference;
 }
@@ -66,6 +85,7 @@ @interface FLTCam : NSObject <FlutterTexture,
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger;
+- (void)stopImageStream;
 - (void)captureToFile:(NSString *)filename result:(FlutterResult)result;
 @end
 
 @implementation FLTCam
+// The YUV420 format is used for iOS 10+, which is the minimum requirement for this plugin.
+// The format is used to stream image byte data to Dart.
+FourCharCode const videoFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+
 - (instancetype)initWithCameraName:(NSString *)cameraName
                   resolutionPreset:(NSString *)resolutionPreset
                              error:(NSError **)error {
@@ -102,13 +132,16 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
   _captureSession = [[AVCaptureSession alloc] init];
   AVCaptureSessionPreset preset;
   if ([resolutionPreset isEqualToString:@"high"]) {
-    preset = AVCaptureSessionPresetHigh;
+    preset = AVCaptureSessionPreset1280x720;
+    _previewSize = CGSizeMake(1280, 720);
   } else if ([resolutionPreset isEqualToString:@"medium"]) {
-    preset = AVCaptureSessionPresetMedium;
+    preset = AVCaptureSessionPreset640x480;
+    _previewSize = CGSizeMake(640, 480);
   } else {
     NSAssert([resolutionPreset isEqualToString:@"low"], @"Unknown resolution preset %@",
              resolutionPreset);
-    preset = AVCaptureSessionPresetLow;
+    preset = AVCaptureSessionPreset352x288;
+    _previewSize = CGSizeMake(352, 288);
   }
   _captureSession.sessionPreset = preset;
   _captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
@@ -119,13 +152,15 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
     *error = localError;
     return nil;
   }
-  CMVideoDimensions dimensions =
-      CMVideoFormatDescriptionGetDimensions([[_captureDevice activeFormat] formatDescription]);
-  _previewSize = CGSizeMake(dimensions.width, dimensions.height);
+
+  vImageBuffer_Init(&_destinationBuffer, _previewSize.width, _previewSize.height, 32,
+                    kvImageNoFlags);
+  vImageBuffer_Init(&_conversionBuffer, _previewSize.width, _previewSize.height, 32,
+                    kvImageNoFlags);
 
   _captureVideoOutput = [AVCaptureVideoDataOutput new];
   _captureVideoOutput.videoSettings =
-      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
+      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
   [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
   [_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
@@ -185,6 +220,46 @@ - (void)captureOutput:(AVCaptureOutput *)output
       });
       return;
     }
+    if (_isStreamingImages) {
+      if (_imageStreamHandler.eventSink) {
+        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+        CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+        size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
+        size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);
+
+        NSMutableArray *planes = [NSMutableArray array];
+
+        size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
+        for (int i = 0; i < planeCount; i++) {
+          void *planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i);
+          size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i);
+          size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i);
+          size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i);
+
+          NSNumber *length = @(bytesPerRow * height);
+          NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue];
+
+          NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary];
+          planeBuffer[@"bytesPerRow"] = @(bytesPerRow);
+          planeBuffer[@"width"] = @(width);
+          planeBuffer[@"height"] = @(height);
+          planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes];
+
+          [planes addObject:planeBuffer];
+        }
+
+        NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary];
+        imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth];
+        imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight];
+        imageBuffer[@"format"] = @(videoFormat);
+        imageBuffer[@"planes"] = planes;
+
+        _imageStreamHandler.eventSink(imageBuffer);
+
+        CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+      }
+    }
     if (_isRecording) {
       if (_videoWriter.status == AVAssetWriterStatusFailed) {
         _eventSink(@{
@@ -269,7 +344,58 @@ - (CVPixelBufferRef)copyPixelBuffer {
   while (!OSAtomicCompareAndSwapPtrBarrier(pixelBuffer, nil, (void **)&_latestPixelBuffer)) {
     pixelBuffer = _latestPixelBuffer;
   }
-  return pixelBuffer;
+
+  return [self convertYUVImageToBGRA:pixelBuffer];
+}
+
+// Since the video format was changed to kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, the image
+// has to be converted to kCVPixelFormatType_32BGRA, which is usable as a Flutter texture.
+- (CVPixelBufferRef)convertYUVImageToBGRA:(CVPixelBufferRef)pixelBuffer {
+  CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+  vImage_YpCbCrToARGB infoYpCbCrToARGB;
+  vImage_YpCbCrPixelRange pixelRange;
+  pixelRange.Yp_bias = 16;
+  pixelRange.CbCr_bias = 128;
+  pixelRange.YpRangeMax = 235;
+  pixelRange.CbCrRangeMax = 240;
+  pixelRange.YpMax = 235;
+  pixelRange.YpMin = 16;
+  pixelRange.CbCrMax = 240;
+  pixelRange.CbCrMin = 16;
+
+  vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4, &pixelRange,
+                                                &infoYpCbCrToARGB, kvImage420Yp8_CbCr8,
+                                                kvImageARGB8888, kvImageNoFlags);
+
+  vImage_Buffer sourceLumaBuffer;
+  sourceLumaBuffer.data = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
+  sourceLumaBuffer.height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
+  sourceLumaBuffer.width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
+  sourceLumaBuffer.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
+
+  vImage_Buffer sourceChromaBuffer;
+  sourceChromaBuffer.data = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
+  sourceChromaBuffer.height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
+  sourceChromaBuffer.width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
+  sourceChromaBuffer.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
+
+  vImageConvert_420Yp8_CbCr8ToARGB8888(&sourceLumaBuffer, &sourceChromaBuffer, &_destinationBuffer,
+                                       &infoYpCbCrToARGB, NULL, 255,
+                                       kvImagePrintDiagnosticsToConsole);
+
+  CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+  CVPixelBufferRelease(pixelBuffer);
+
+  const uint8_t map[4] = {3, 2, 1, 0};
+  vImagePermuteChannels_ARGB8888(&_destinationBuffer, &_conversionBuffer, map, kvImageNoFlags);
+
+  CVPixelBufferRef newPixelBuffer = NULL;
+  CVPixelBufferCreateWithBytes(NULL, _conversionBuffer.width, _conversionBuffer.height,
+                               kCVPixelFormatType_32BGRA, _conversionBuffer.data,
+                               _conversionBuffer.rowBytes, NULL, NULL, NULL, &newPixelBuffer);
+
+  return newPixelBuffer;
 }
 
 - (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
@@ -282,6 +408,7 @@ - (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
   _eventSink = events;
   return nil;
 }
+
 - (void)startVideoRecordingAtPath:(NSString *)path result:(FlutterResult)result {
   if (!_isRecording) {
     if (![self setupWriterForPath:path]) {
@@ -321,6 +448,32 @@ - (void)stopVideoRecordingWithResult:(FlutterResult)result {
   }
 }
 
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
+  if (!_isStreamingImages) {
+    FlutterEventChannel *eventChannel =
+        [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream"
                                   binaryMessenger:messenger];
+
+    _imageStreamHandler = [[FLTImageStreamHandler alloc] init];
+    [eventChannel setStreamHandler:_imageStreamHandler];
+
+    _isStreamingImages = YES;
+  } else {
+    _eventSink(
+        @{@"event" : @"error", @"errorDescription" : @"Images from camera are already streaming!"});
+  }
+}
+
+- (void)stopImageStream {
+  if (_isStreamingImages) {
+    _isStreamingImages = NO;
+    _imageStreamHandler = nil;
+  } else {
+    _eventSink(
+        @{@"event" : @"error", @"errorDescription" : @"Images from camera are not streaming!"});
+  }
+}
+
 - (BOOL)setupWriterForPath:(NSString *)path {
   NSError *error = nil;
   NSURL *outputURL;
@@ -495,6 +648,12 @@ - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result
       });
       [cam start];
     }
+  } else if ([@"startImageStream" isEqualToString:call.method]) {
+    [_camera startImageStreamWithMessenger:_messenger];
+    result(nil);
+  } else if ([@"stopImageStream" isEqualToString:call.method]) {
+    [_camera stopImageStream];
+    result(nil);
   } else {
     NSDictionary *argsMap = call.arguments;
     NSUInteger textureId = ((NSNumber *)argsMap[@"textureId"]).unsignedIntegerValue;
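The iOS path above fills in per-plane `width`/`height` but not `bytesPerPixel`, while Android does the opposite, so Dart-side consumers should treat those fields as optional. A hedged sketch using only the `CameraImage`/`Plane` API introduced later in this patch:

```dart
import 'package:camera/camera.dart';

/// Logs the layout of an incoming frame.
///
/// Android fills in `bytesPerPixel` (and leaves `width`/`height` null); iOS
/// fills in `width`/`height` (and leaves `bytesPerPixel` null), so all of the
/// optional fields are null-checked before use.
void describeFrame(CameraImage image) {
  if (image.format.group != ImageFormatGroup.yuv420) {
    print('Unexpected image format, raw platform value: ${image.format.raw}');
    return;
  }
  print('Frame: ${image.width}x${image.height}, ${image.planes.length} plane(s)');
  for (int i = 0; i < image.planes.length; i++) {
    final Plane plane = image.planes[i];
    final String pixelStride = plane.bytesPerPixel?.toString() ?? 'n/a';
    final String planeSize = (plane.width != null && plane.height != null)
        ? '${plane.width}x${plane.height}'
        : 'n/a';
    print('  plane $i: ${plane.bytes.length} bytes, '
        'bytesPerRow=${plane.bytesPerRow}, '
        'bytesPerPixel=$pixelStride, size=$planeSize');
  }
}
```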
diff --git a/packages/camera/lib/camera.dart b/packages/camera/lib/camera.dart
index 153bbeb69b98..19028891ad84 100644
--- a/packages/camera/lib/camera.dart
+++ b/packages/camera/lib/camera.dart
@@ -1,8 +1,15 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
 import 'dart:async';
+import 'dart:typed_data';
 
 import 'package:flutter/services.dart';
 import 'package:flutter/widgets.dart';
 
+part 'camera_image.dart';
+
 final MethodChannel _channel = const MethodChannel('plugins.flutter.io/camera')
   ..invokeMethod('init');
 
@@ -10,6 +17,8 @@ enum CameraLensDirection { front, back, external }
 
 enum ResolutionPreset { low, medium, high }
 
+typedef onLatestImageAvailable = Function(CameraImage image);
+
 /// Returns the resolution preset as a String.
 String serializeResolutionPreset(ResolutionPreset resolutionPreset) {
   switch (resolutionPreset) {
@@ -110,13 +119,15 @@ class CameraValue {
     this.previewSize,
     this.isRecordingVideo,
     this.isTakingPicture,
+    this.isStreamingImages,
   });
 
   const CameraValue.uninitialized()
       : this(
            isInitialized: false,
            isRecordingVideo: false,
-            isTakingPicture: false);
+            isTakingPicture: false,
+            isStreamingImages: false);
 
   /// True after [CameraController.initialize] has completed successfully.
   final bool isInitialized;
@@ -127,6 +138,9 @@ class CameraValue {
   /// True when the camera is recording (not the same as previewing).
   final bool isRecordingVideo;
 
+  /// True when images from the camera are being streamed.
+  final bool isStreamingImages;
+
   final String errorDescription;
 
   /// The size of the preview in pixels.
@@ -145,6 +159,7 @@ class CameraValue {
     bool isInitialized,
     bool isRecordingVideo,
     bool isTakingPicture,
+    bool isStreamingImages,
     String errorDescription,
     Size previewSize,
   }) {
@@ -154,6 +169,7 @@ class CameraValue {
       previewSize: previewSize ?? this.previewSize,
       isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo,
       isTakingPicture: isTakingPicture ?? this.isTakingPicture,
+      isStreamingImages: isStreamingImages ?? this.isStreamingImages,
     );
   }
 
@@ -164,7 +180,8 @@ class CameraValue {
         'isRecordingVideo: $isRecordingVideo, '
         'isInitialized: $isInitialized, '
         'errorDescription: $errorDescription, '
-        'previewSize: $previewSize)';
+        'previewSize: $previewSize, '
+        'isStreamingImages: $isStreamingImages)';
   }
 }
 
@@ -185,6 +202,7 @@ class CameraController extends ValueNotifier<CameraValue> {
   int _textureId;
   bool _isDisposed = false;
   StreamSubscription<dynamic> _eventSubscription;
+  StreamSubscription<dynamic> _imageStreamSubscription;
   Completer _creatingCompleter;
 
   /// Initializes the camera on the device.
@@ -276,6 +294,90 @@ class CameraController extends ValueNotifier<CameraValue> {
     }
   }
 
+  /// Start streaming images from the platform camera.
+  ///
+  /// Image capture on iOS and Android is configured to always deliver the
+  /// latest image available from the camera and to drop all other images.
+  ///
+  /// When running continuously with a [CameraPreview] widget, this function
+  /// works best with [ResolutionPreset.low]. Using [ResolutionPreset.high]
+  /// can cause significant frame rate drops for the [CameraPreview] on
+  /// lower-end devices.
+  ///
+  /// Throws a [CameraException] if image streaming or video recording has
+  /// already started.
+  // TODO(bmparr): Add settings for resolution and fps.
+  Future<void> startImageStream(onLatestImageAvailable onAvailable) async {
+    if (!value.isInitialized || _isDisposed) {
+      throw CameraException(
+        'Uninitialized CameraController',
+        'startImageStream was called on an uninitialized CameraController.',
+      );
+    }
+    if (value.isRecordingVideo) {
+      throw CameraException(
+        'A video recording is already started.',
+        'startImageStream was called while a video is being recorded.',
+      );
+    }
+    if (value.isStreamingImages) {
+      throw CameraException(
+        'A camera has started streaming images.',
+        'startImageStream was called while a camera was streaming images.',
+      );
+    }
+
+    try {
+      await _channel.invokeMethod('startImageStream');
+      value = value.copyWith(isStreamingImages: true);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+    const EventChannel cameraEventChannel =
+        EventChannel('plugins.flutter.io/camera/imageStream');
+    _imageStreamSubscription =
+        cameraEventChannel.receiveBroadcastStream().listen(
+      (dynamic imageData) {
+        onAvailable(CameraImage._fromPlatformData(imageData));
+      },
+    );
+  }
+
+  /// Stop streaming images from the platform camera.
+  ///
+  /// Throws a [CameraException] if image streaming was not started or if a
+  /// video recording is in progress.
+  Future<void> stopImageStream() async {
+    if (!value.isInitialized || _isDisposed) {
+      throw CameraException(
+        'Uninitialized CameraController',
+        'stopImageStream was called on an uninitialized CameraController.',
+      );
+    }
+    if (value.isRecordingVideo) {
+      throw CameraException(
+        'A video recording is already started.',
+        'stopImageStream was called while a video is being recorded.',
+      );
+    }
+    if (!value.isStreamingImages) {
+      throw CameraException(
+        'No camera is streaming images.',
+        'stopImageStream was called when no camera is streaming images.',
+      );
+    }
+
+    try {
+      value = value.copyWith(isStreamingImages: false);
+      await _channel.invokeMethod('stopImageStream');
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+
+    _imageStreamSubscription.cancel();
+    _imageStreamSubscription = null;
+  }
+
   /// Start a video recording and save the file to [path].
   ///
   /// A path can for example be obtained using
@@ -299,6 +401,13 @@ class CameraController extends ValueNotifier<CameraValue> {
         'startVideoRecording was called when a recording is already started.',
       );
     }
+    if (value.isStreamingImages) {
+      throw CameraException(
+        'A camera has started streaming images.',
+        'startVideoRecording was called while a camera was streaming images.',
+      );
+    }
+
     try {
       await _channel.invokeMethod(
         'startVideoRecording',
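The state checks above make image streaming and video recording mutually exclusive. A small helper, purely illustrative and not part of the patch, that toggles the stream without tripping any of the documented `CameraException`s:

```dart
import 'package:camera/camera.dart';

/// Starts or stops the image stream, respecting the controller's state flags.
Future<void> toggleImageStream(
    CameraController controller, onLatestImageAvailable onFrame) async {
  if (!controller.value.isInitialized || controller.value.isRecordingVideo) {
    // Streaming is not allowed while uninitialized or while recording video.
    return;
  }
  if (controller.value.isStreamingImages) {
    await controller.stopImageStream();
  } else {
    await controller.startImageStream(onFrame);
  }
}
```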
diff --git a/packages/camera/lib/camera_image.dart b/packages/camera/lib/camera_image.dart
new file mode 100644
index 000000000000..2dd665ddb400
--- /dev/null
+++ b/packages/camera/lib/camera_image.dart
@@ -0,0 +1,124 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+part of 'camera.dart';
+
+/// A single color plane of image data.
+///
+/// The number and meaning of the planes in an image are determined by the
+/// format of the Image.
+class Plane {
+  Plane._fromPlatformData(Map<dynamic, dynamic> data)
+      : bytes = data['bytes'],
+        bytesPerPixel = data['bytesPerPixel'],
+        bytesPerRow = data['bytesPerRow'],
+        height = data['height'],
+        width = data['width'];
+
+  /// Bytes representing this plane.
+  final Uint8List bytes;
+
+  /// The distance between adjacent pixel samples on Android, in bytes.
+  ///
+  /// Will be `null` on iOS.
+  final int bytesPerPixel;
+
+  /// The row stride for this color plane, in bytes.
+  final int bytesPerRow;
+
+  /// Height of the pixel buffer on iOS.
+  ///
+  /// Will be `null` on Android.
+  final int height;
+
+  /// Width of the pixel buffer on iOS.
+  ///
+  /// Will be `null` on Android.
+  final int width;
+}
+
+/// Group of image formats that are comparable across Android and iOS platforms.
+enum ImageFormatGroup {
+  /// The image format does not fit into any specific group.
+  unknown,
+
+  /// Multi-plane YUV 420 format.
+  ///
+  /// This format is a generic YCbCr format, capable of describing any 4:2:0
+  /// chroma-subsampled planar or semiplanar buffer (but not fully interleaved),
+  /// with 8 bits per color sample.
+  ///
+  /// On Android, this is `android.graphics.ImageFormat.YUV_420_888`. See
+  /// https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888
+  ///
+  /// On iOS, this is `kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`. See
+  /// https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers/kcvpixelformattype_420ypcbcr8biplanarvideorange?language=objc
+  yuv420,
+}
+
+/// Describes how pixels are represented in an image.
+class ImageFormat {
+  ImageFormat._fromPlatformData(this.raw) : group = _asImageFormatGroup(raw);
+
+  /// Describes the format group the raw image format falls into.
+  final ImageFormatGroup group;
+
+  /// Raw version of the format from the Android or iOS platform.
+  ///
+  /// On Android, this is an `int` from class `android.graphics.ImageFormat`. See
+  /// https://developer.android.com/reference/android/graphics/ImageFormat
+  ///
+  /// On iOS, this is a `FourCharCode` constant from Pixel Format Identifiers.
+  /// See https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers?language=objc
+  final dynamic raw;
+}
+
+ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) {
+  // 35 is android.graphics.ImageFormat.YUV_420_888; 875704438 is the FourCC
+  // value of kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ('420v').
+  if (rawFormat == 35 || rawFormat == 875704438) {
+    return ImageFormatGroup.yuv420;
+  } else {
+    return ImageFormatGroup.unknown;
+  }
+}
+
+/// A single complete image buffer from the platform camera.
+///
+/// This class allows for direct application access to the pixel data of an
+/// Image through one or more [Uint8List]s. Each buffer is encapsulated in a
+/// [Plane] that describes the layout of the pixel data in that plane. The
+/// [CameraImage] is not directly usable as a UI resource.
+///
+/// Although not all image formats are planar on iOS, we treat 1-dimensional
+/// images as single planar images.
+class CameraImage {
+  CameraImage._fromPlatformData(Map<dynamic, dynamic> data)
+      : format = ImageFormat._fromPlatformData(data['format']),
+        height = data['height'],
+        width = data['width'],
+        planes = List<Plane>.unmodifiable(data['planes']
+            .map((dynamic planeData) => Plane._fromPlatformData(planeData)));
+
+  /// Format of the image provided.
+  ///
+  /// Determines the number of planes needed to represent the image, and
+  /// the general layout of the pixel data in each [Uint8List].
+  final ImageFormat format;
+
+  /// Height of the image in pixels.
+  ///
+  /// For formats where some color channels are subsampled, this is the height
+  /// of the largest-resolution plane.
+  final int height;
+
+  /// Width of the image in pixels.
+  ///
+  /// For formats where some color channels are subsampled, this is the width
+  /// of the largest-resolution plane.
+  final int width;
+
+  /// The pixel planes for this image.
+  ///
+  /// The number of planes is determined by the format of the image.
+  final List<Plane> planes;
+}
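A common first step before handing a `CameraImage` to an image-processing library is flattening its planes into one buffer. An illustrative sketch (the helper name is hypothetical, and row padding is kept as-is):

```dart
import 'dart:typed_data';

import 'package:camera/camera.dart';

/// Copies all plane buffers of a [CameraImage] into one contiguous byte list.
Uint8List concatenatePlanes(CameraImage image) {
  final int totalLength = image.planes
      .fold<int>(0, (int sum, Plane plane) => sum + plane.bytes.length);
  final Uint8List allBytes = Uint8List(totalLength);
  int offset = 0;
  for (final Plane plane in image.planes) {
    allBytes.setRange(offset, offset + plane.bytes.length, plane.bytes);
    offset += plane.bytes.length;
  }
  return allBytes;
}
```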
diff --git a/packages/camera/pubspec.yaml b/packages/camera/pubspec.yaml
index 62e0f7212346..bd029dd59b47 100644
--- a/packages/camera/pubspec.yaml
+++ b/packages/camera/pubspec.yaml
@@ -1,7 +1,7 @@
 name: camera
 description: A Flutter plugin for getting information about and controlling the camera on Android
   and iOS. Supports previewing the camera feed and capturing images.
-version: 0.2.7
+version: 0.2.8
 authors:
   - Flutter Team
   - Luigi Agosti