
A sample camera View without UIImagePickerController (iOS 4 and later)

Through iOS 3, the only way to use the camera was UIImagePickerController, but iOS 4 opened up the core camera APIs (AVFoundation), making all sorts of customization possible.
So let's write a sample.

  • Appearance
    • Drag the white rectangle to frame the target; calling the capture method at that moment grabs the image inside the rectangle.
  • Structure
    • CameraView : a View that displays the live camera feed in real time (built on the AVFoundation pipeline sketched below).
    • FrameRectView : the white rectangle drawn over the camera image; drag it to set the capture area.
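
Both views sit on top of the standard AVFoundation capture pipeline. Stripped of configuration and error handling, the skeleton is roughly the following (a sketch only; the full code below guards every step and picks the back camera explicitly):

AVCaptureSession *session = [[[AVCaptureSession alloc] init] autorelease];
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
[session addInput:input];

AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
dispatch_queue_t queue = dispatch_queue_create("sample.queue", NULL);  // must be a serial queue
[output setSampleBufferDelegate:self queue:queue];  // self: any AVCaptureVideoDataOutputSampleBufferDelegate
[session addOutput:output];
[session startRunning];  // frames arrive in -captureOutput:didOutputSampleBuffer:fromConnection: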

Source

  • CameraView.h : the View class that displays the camera
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <QuartzCore/QuartzCore.h>
#import <AssetsLibrary/AssetsLibrary.h>
#include <math.h>

#import "FrameRectView.h"

@class CameraView;

@protocol CameraViewDelegate<NSObject>

@optional
- (void)captureEnded:(CameraView *)cameraView;
@end

@interface CameraView : UIView
<AVCaptureVideoDataOutputSampleBufferDelegate> {
 @protected
  UIImage *imageBuffer;
  BOOL requireTakePhoto;
  BOOL processingTakePhoto;
  void *bitmap;

#ifndef __i386__   // AVFoundation capture is unavailable on the (i386) simulator
  AVCaptureSession *captureSession;
  dispatch_queue_t queue;
#endif
  
  UIImage *capturedImage;
  id<CameraViewDelegate> delegate;
}

// Initializer
- (id)initWithFrame:(CGRect)frame delegate:(id<CameraViewDelegate>)delegate;

// Open camera session
- (void)openCameraSession;

// Close camera session
- (void)closeCameraSession;

// Do capture
- (void)doCapture;

// capture image
- (UIImage *)capturedImage;

// set frame mode
@property (nonatomic, assign) BOOL frameMode;

// set delegate
@property (nonatomic, assign) id<CameraViewDelegate> delegate;

@end
  • CameraView.m
#import "CameraView.h"

static inline double radians (double degrees) {return degrees * M_PI/180;}

// rotation angle
typedef enum RotationAngle_ {
  RotationAngle90 = 90,
  RotationAngle180 = 180,
  RotationAngle270 = 270,
} RotationAngle;

@interface CameraView ()

- (UIImage *)rotateImage:(UIImage *)img angle:(RotationAngle)angle;
- (void)endedCaptureCameraImage:(UIImage *)uiimage;  // main-thread completion
- (void)callCaptureEnded;

@property (nonatomic, retain) FrameRectView *frameRectView;

@property (assign, readonly) size_t cameraWidth;
@property (assign, readonly) size_t cameraHeight;
@property (readonly) NSString * cameraSessionPreset;
@property (nonatomic, retain) UIImage *imageBuffer;
@property (retain) UIImage *capturedImage;

#ifndef __i386__
@property (nonatomic, retain) AVCaptureSession *captureSession;
#endif

@end

@implementation CameraView

#pragma mark -
#pragma mark public methods

- (id)initWithFrame:(CGRect)frame delegate:(id<CameraViewDelegate>)adelegate {
  self = [super initWithFrame:frame];
  if (self) {
    [self setDelegate:adelegate];

    // set frame rect view
    FrameRectView *frView = [[FrameRectView alloc] initWithFrame:frame
                                                     withCenterY:self.frame.size.height / 2];
    [frView autorelease];
    [self setFrameRectView:frView];
    [self addSubview:frView];
  }
  return self;
}

- (void)openCameraSession {
  requireTakePhoto = NO;
  processingTakePhoto = NO;
  
  // Initialize image buffer 
  // ---------
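  // NOTE: the CGDataProvider below wraps `bitmap` directly (no copy, NULL
  // release callback), so self.imageBuffer keeps rendering whatever bytes
  // are currently in `bitmap`; memcpy'ing a camera frame into it later
  // effectively updates the image in place.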
	size_t width = self.cameraWidth;
	size_t height = self.cameraHeight;
  
	bitmap = NSZoneMalloc(self.zone, width * height * 4);
	CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
	CGDataProviderRef dataProviderRef = CGDataProviderCreateWithData(NULL, bitmap, width * height * 4, NULL);
	CGImageRef cgImage = CGImageCreate(width, height, 8, 32, width * 4, 
                                     colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst,
                                     dataProviderRef, NULL, 0, kCGRenderingIntentDefault);
	self.imageBuffer = [UIImage imageWithCGImage:cgImage];
  CGImageRelease(cgImage);
	CGColorSpaceRelease(colorSpace);
	CGDataProviderRelease(dataProviderRef);
  // ---------
  
#ifndef __i386__
  
	// Start session open
	self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
  
  // Select device
  AVCaptureDevice *videoCaptureDevice = nil;
	NSArray *cameraArray = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
	for (AVCaptureDevice *camera in cameraArray) {
		if (camera.position == AVCaptureDevicePositionBack) {
			videoCaptureDevice = camera;
			break;
		}
	}
  
	// Set video stream
  // ---------
	NSError *error = nil;
	AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&error];
	if (videoInput) {
		[self.captureSession addInput:videoInput];

		// config (session)
		[self.captureSession beginConfiguration];
		self.captureSession.sessionPreset = self.cameraSessionPreset;
		[self.captureSession commitConfiguration];
		
    // config (input)
    // -- set video mode
		if ([videoCaptureDevice lockForConfiguration:&error]) {
      
      // AVMode -> AutoFocus
			if ([videoCaptureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
				videoCaptureDevice.focusMode = AVCaptureFocusModeContinuousAutoFocus;
			}else {
				if ([videoCaptureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
					videoCaptureDevice.focusMode = AVCaptureFocusModeAutoFocus;
				}
			}
      
      // Flash -> auto mode
			if ([videoCaptureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
				videoCaptureDevice.flashMode = AVCaptureFlashModeAuto;
			}
      
      // Exposure -> auto exposure
			if ([videoCaptureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
				videoCaptureDevice.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
			}
      
      // White balance -> auto white balance
			if ([videoCaptureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
				videoCaptureDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
			}
      
      // torch mode -> off
			if ([videoCaptureDevice isTorchModeSupported:AVCaptureTorchModeOff]){
				videoCaptureDevice.torchMode = AVCaptureTorchModeOff;
			}
      
			[videoCaptureDevice unlockForConfiguration];
      
		}else {
			NSLog(@"ERROR:%@", error);
		}
    // ---------
    
		// Get preview layer and set self layer.
    // ---------
		AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
		previewLayer.automaticallyAdjustsMirroring = NO;
		previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;	// fill the whole view
		previewLayer.frame = self.bounds;
		[self.layer insertSublayer:previewLayer atIndex:0];
    // ---------
    
	}else {
		NSLog(@"ERROR:%@", error);
	}
  
  // Get video data  (Code Snippet SP16)
  // ---------
	AVCaptureVideoDataOutput *videoOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
	if(videoInput){
		videoOutput.videoSettings = [NSDictionary dictionaryWithObject:
                                 [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                            forKey:(id)kCVPixelBufferPixelFormatTypeKey];
		videoOutput.minFrameDuration = CMTimeMake(1, 20);	// 20fps
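		// NOTE: minFrameDuration was deprecated in iOS 5 in favor of setting
		// videoMinFrameDuration on the AVCaptureConnection instead.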
		videoOutput.alwaysDiscardsLateVideoFrames = YES;
		queue = dispatch_queue_create("jp.mmasashi.camera.CameraView", NULL);
		[videoOutput setSampleBufferDelegate:self queue:queue];
		dispatch_release(queue);
		[self.captureSession addOutput:videoOutput];
	}
  // ---------
  
	// Start video session
	if(videoInput){
		[self.captureSession startRunning];
	}
#endif
}

- (void)closeCameraSession {
#ifndef __i386__
	[self.captureSession stopRunning];
	for (AVCaptureOutput *output in self.captureSession.outputs) {
		[self.captureSession removeOutput:output];
	}
	for (AVCaptureInput *input in self.captureSession.inputs) {
		[self.captureSession removeInput:input];
	}
	self.captureSession = nil;
#endif
	NSZoneFree(self.zone, bitmap);
	bitmap = NULL;
}

- (void)doCapture {
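  // This only raises a flag; the actual grab happens on the next frame
  // delivered to captureOutput:didOutputSampleBuffer:fromConnection: below.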
  if (!processingTakePhoto) {
    requireTakePhoto = YES;
  }
}

#pragma mark -
#pragma mark camera parameter

//- (size_t)cameraWidth {
//  return 640;
//}
//- (size_t)cameraHeight {
//  return 480;
//}
//- (NSString *)cameraSessionPreset {
//  return AVCaptureSessionPreset640x480;
//}
- (size_t)cameraWidth {
  return 1280;
}
- (size_t)cameraHeight {
  return 720;
}
- (NSString *)cameraSessionPreset {
#ifndef __i386__
  return AVCaptureSessionPreset1280x720;
#else
  return nil;
#endif
}
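
// NOTE: AVCaptureSessionPreset1280x720 is only available on devices whose
// camera can deliver 720p (iPhone 4 and later); on older hardware, fall
// back to the commented-out 640x480 values above.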

#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

#ifndef __i386__
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
       fromConnection:(AVCaptureConnection *)connection {

	if (requireTakePhoto) {
		requireTakePhoto = NO;
		processingTakePhoto = YES;
		CVPixelBufferRef pixbuff = CMSampleBufferGetImageBuffer(sampleBuffer);
		if(CVPixelBufferLockBaseAddress(pixbuff, 0) == kCVReturnSuccess){

			memcpy(bitmap, CVPixelBufferGetBaseAddress(pixbuff), self.cameraWidth * self.cameraHeight * 4);
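			// NOTE: this memcpy assumes the pixel buffer is tightly packed
			// (bytesPerRow == width * 4). Some configurations pad each row;
			// a safer copy would go row by row:
			//   size_t bpr = CVPixelBufferGetBytesPerRow(pixbuff);
			//   uint8_t *src = (uint8_t *)CVPixelBufferGetBaseAddress(pixbuff);
			//   for (size_t y = 0; y < self.cameraHeight; y++)
			//     memcpy((uint8_t *)bitmap + y * self.cameraWidth * 4,
			//            src + y * bpr, self.cameraWidth * 4);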
      
      // Rotate image
      UIImage *rotatedImage = [self rotateImage:self.imageBuffer angle:RotationAngle270];
      
      UIImage *capImage = rotatedImage;
      
      if ([self frameMode]) {
        CGRect stRect = self.frameRectView.targetFrameRect;

        // Get scale rate 
        CGFloat rateW = 1.0f * self.cameraHeight / self.frame.size.width;
        CGFloat rateH = 1.0f * self.cameraWidth / self.frame.size.height;
        CGFloat rateAll = (rateW < rateH) ? rateW : rateH;
        
        // Crop image
        CGFloat hiddenWidth = (self.frame.size.height / self.cameraWidth) * self.cameraHeight - self.frame.size.width;
        CGFloat hiddenHeight = (self.frame.size.width / self.cameraHeight) * self.cameraWidth - self.frame.size.height;
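        // hiddenWidth/hiddenHeight: because the preview layer uses
        // AspectFill, the scaled camera image overflows the view; these are
        // the overflow amounts (in view points) cropped off-screen, so half
        // of each is added back when mapping the frame rect into image
        // coordinates below.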
        
        hiddenWidth = (hiddenWidth < 0) ? 0 : hiddenWidth;
        hiddenHeight = (hiddenHeight < 0) ? 0 : hiddenHeight;
      
        CGRect cropRect = CGRectMake(stRect.origin.x * rateAll + hiddenWidth * rateAll/2,
                                     stRect.origin.y * rateAll + hiddenHeight * rateAll/2, 
                                     stRect.size.width * rateAll,
                                     stRect.size.height * rateAll);
        
        CGImageRef croppedImage = CGImageCreateWithImageInRect(rotatedImage.CGImage, cropRect);
        capImage = [UIImage imageWithCGImage:croppedImage];
        CGImageRelease(croppedImage);
      }
      
      [self performSelectorOnMainThread:@selector(endedCaptureCameraImage:)
                             withObject:capImage
                          waitUntilDone:NO];
      
			CVPixelBufferUnlockBaseAddress(pixbuff, 0);
		}
	}
}
#endif


- (void)endedCaptureCameraImage:(UIImage *)uiimage {
  processingTakePhoto = NO;
  [self setCapturedImage:uiimage];
  [self callCaptureEnded];
}

#pragma mark - frame mode 

- (void)setFrameMode:(BOOL)frameMode {
  [self.frameRectView setHidden:!frameMode];
}

- (BOOL)frameMode {
  return ![self.frameRectView isHidden];
}

#pragma mark -
#pragma mark delegate methods

- (void)callCaptureEnded {
  NSLog(@"IN %s", __func__);
  if ([delegate respondsToSelector:@selector(captureEnded:)]) {
    [delegate performSelector:@selector(captureEnded:) withObject:self];
  }
}

#pragma mark -
#pragma mark utility methods

// rotate image
- (UIImage *) rotateImage:(UIImage *)img angle:(RotationAngle)angle
{
  CGImageRef imgRef = [img CGImage];
  CGContextRef context;
  
  switch (angle) {
    case RotationAngle90:
      UIGraphicsBeginImageContext(CGSizeMake(img.size.height, img.size.width));
      context = UIGraphicsGetCurrentContext();
      CGContextTranslateCTM(context, img.size.height, img.size.width);
      CGContextScaleCTM(context, 1.0, -1.0);
      CGContextRotateCTM(context, M_PI/2.0);
      break;
    case RotationAngle180:
      UIGraphicsBeginImageContext(CGSizeMake(img.size.width, img.size.height));
      context = UIGraphicsGetCurrentContext();
      CGContextTranslateCTM(context, img.size.width, 0);
      CGContextScaleCTM(context, 1.0, -1.0);
      CGContextRotateCTM(context, -M_PI);
      break;
    case RotationAngle270:
      UIGraphicsBeginImageContext(CGSizeMake(img.size.height, img.size.width));
      context = UIGraphicsGetCurrentContext();
      CGContextScaleCTM(context, 1.0, -1.0);
      CGContextRotateCTM(context, -M_PI/2.0);
      break;
    default:
      NSLog(@"you can select an angle of 90, 180, 270");
      return nil;
  } 
  
  CGContextDrawImage(context, CGRectMake(0, 0, img.size.width, img.size.height), imgRef);
  UIImage *ret = UIGraphicsGetImageFromCurrentImageContext(); 
  
  UIGraphicsEndImageContext();
  return ret;
}
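
// NOTE: UIGraphicsBeginImageContext always creates a 1x (non-retina)
// context, which is fine here because we are working in camera pixels;
// for screen-resolution output, UIGraphicsBeginImageContextWithOptions
// (iOS 4+) respects the device scale.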

#pragma mark -

- (void)dealloc {
  self.imageBuffer = nil;
  self.capturedImage = nil;
#ifndef __i386__
	self.captureSession = nil;
#endif
  
  [self setFrameRectView:nil];
  [super dealloc];
}

@synthesize imageBuffer;
@synthesize capturedImage;

#ifndef __i386__
@synthesize captureSession;
#endif

@synthesize delegate;

@synthesize frameRectView;

@end
  • FrameRectView.h : the View used to frame the capture target
#import <UIKit/UIKit.h>

@interface FrameRectView : UIView {
 @private
  CGFloat centerY;
  CGRect targetFrameRect;
  
  UIColor *strokeColor;
  UIColor *fillColor;
  
  // for dragging
  CGPoint dragStartPoint;
  NSInteger direcX;
  NSInteger direcY;
}

// Initializer
- (id)initWithFrame:(CGRect)frame withCenterY:(CGFloat)y;

// Get target frame rect
@property (readonly) CGRect targetFrameRect;

@end
  • FrameRectView.m
#import "FrameRectView.h"

static const CGFloat kDefaultRectMinWidth = 50.0f;
static const CGFloat kDefaultRectMinHeight = 30.0f;

static const CGFloat kDefaultRectWidth = 220.0f;
static const CGFloat kDefaultRectHeight = 50.0f;
static const CGFloat kDefaultStrokeSize = 5.0f;


@interface FrameRectView ()

- (void)updateRectImage;
@property (nonatomic, retain) UIColor *strokeColor;
@property (nonatomic, retain) UIColor *fillColor;

@end


@implementation FrameRectView

#pragma mark -

- (id)initWithFrame:(CGRect)frame withCenterY:(CGFloat)y {
  self = [super initWithFrame:frame];
  if (self) {
    [self setBackgroundColor:[UIColor clearColor]];
    
    // -- set position parameter
    {
      CGFloat startX = frame.size.width / 2 - kDefaultRectWidth / 2;
      
      if (y > 0) {
        centerY = y;
      } else {
        centerY = frame.size.height / 2;
      }
      
      targetFrameRect = CGRectMake(startX, centerY - kDefaultRectHeight / 2,
                                   kDefaultRectWidth, kDefaultRectHeight);
    }
    
    // -- set color parameter
    {
      [self setStrokeColor:[UIColor colorWithRed:1.0f
                                           green:1.0f
                                            blue:1.0f
                                           alpha:1.0f]];
      [self setFillColor:[UIColor colorWithRed:1.0f
                                         green:1.0f
                                          blue:1.0f
                                         alpha:0.2f]];
    }
    
    // -- set gesture recognizer
    {
      UIGestureRecognizer *dragGesture = [[UIPanGestureRecognizer alloc] 
                                          initWithTarget:self
                                          action:@selector(draggedView:)];
      [self addGestureRecognizer:dragGesture];
      [dragGesture release];
    }
  }
  return self;
}

#pragma mark -

- (void)drawRect:(CGRect)rect {
  // draw the frame rectangle with a bezier path
  [self updateRectImage];
}


#pragma mark -
#pragma mark touch handling

- (void)draggedView:(id)sender {
  UIPanGestureRecognizer *pan = (UIPanGestureRecognizer *)sender;
  CGPoint location = [pan locationInView:self];
  
  if (pan.state == UIGestureRecognizerStateBegan) {
    dragStartPoint = location;
    direcX = (self.frame.size.width / 2 < location.x) ? 1 : -1;
    direcY = (centerY < location.y) ? 1 : -1; 
    return;
  }
  if (pan.state == UIGestureRecognizerStateEnded) {
    dragStartPoint = CGPointZero;
    direcX = 0;
    direcY = 0;
    return;
  }
  
  CGFloat diffX = location.x - dragStartPoint.x;
  CGFloat diffY = location.y - dragStartPoint.y;
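  // direcX/direcY record which side of the center the drag started on, so
  // dragging outward always grows the rect; the * 2 below keeps the rect
  // centered, since both opposing edges move symmetrically.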
  
  CGRect newRect;
  newRect.size.width = targetFrameRect.size.width + direcX * diffX * 2;
  newRect.size.height = targetFrameRect.size.height + direcY * diffY * 2;
  
  if ( newRect.size.width > self.frame.size.width ) {
    newRect.size.width = self.frame.size.width;
  } else if (newRect.size.width < kDefaultRectMinWidth) {
    newRect.size.width = kDefaultRectMinWidth;
  }
  
  if ( newRect.size.height > centerY * 2 ) {
    newRect.size.height = centerY * 2;
  } else if ( newRect.size.height < kDefaultRectMinHeight ) {
    newRect.size.height = kDefaultRectMinHeight;
  }
  
  newRect.origin.x = self.frame.size.width / 2 - newRect.size.width / 2;
  newRect.origin.y = centerY - newRect.size.height / 2;
  
  targetFrameRect = newRect;
  [self setNeedsDisplay];
  
  dragStartPoint = location;
}


#pragma mark -
#pragma mark private methods

- (void)updateRectImage {
  UIBezierPath *aPath = [UIBezierPath bezierPath];
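  // (The same outline could be built in one call with
  // [UIBezierPath bezierPathWithRect:targetFrameRect]; the explicit points
  // are kept here to make the drawing order obvious.)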
  
  // set render color and style
  [self.strokeColor setStroke];
  [self.fillColor setFill];
  aPath.lineWidth = 2;
  
  // trace the rectangle outline
  {
    CGFloat originX = targetFrameRect.origin.x;
    CGFloat originY = targetFrameRect.origin.y;
    CGFloat width = targetFrameRect.size.width;
    CGFloat height = targetFrameRect.size.height;
    
    [aPath moveToPoint:CGPointMake(originX, originY)];
    [aPath addLineToPoint:CGPointMake(originX+width, originY)];
    [aPath addLineToPoint:CGPointMake(originX+width, originY+height)];
    [aPath addLineToPoint:CGPointMake(originX, originY+height)];
  }
  
  // close the path back to the start point, completing the rectangle
  [aPath closePath];
  
  //rendering
  [aPath stroke];
  [aPath fill];
}

#pragma mark -

- (void)dealloc {
  [strokeColor release];
  [fillColor release];
  [super dealloc];
}

@synthesize strokeColor;
@synthesize fillColor;

@synthesize targetFrameRect;

@end

Usage sample

A sample that simply captures the image inside the rectangle when the SHOT button is tapped.

  • CameraTestViewController.h
#import <UIKit/UIKit.h>
#import "CameraView.h"

@interface CameraTestViewController : UIViewController
<CameraViewDelegate>

@property (nonatomic, retain) CameraView *cameraView;

@end
  • CameraTestViewController.m
#import "CameraTestViewController.h"

@implementation CameraTestViewController

#pragma mark - View lifecycle

- (void)viewDidLoad {
  [super viewDidLoad];

  // start camera view
  {
    CameraView *cmv = [[CameraView alloc] initWithFrame:CGRectMake(0, 0, 320, 460) delegate:self];
    [self setCameraView:cmv];
    [self.view addSubview:cmv];
    [cmv release];
  }
    
  // add button
  {
    UIButton *btn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [btn setFrame:CGRectMake(120, 390, 90, 44)];
    [btn setTitle:@"SHOT!!" forState:UIControlStateNormal];
    [btn addTarget:self action:@selector(shutterTapped:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:btn];
  }
}

- (void)viewWillAppear:(BOOL)animated {
  [super viewWillAppear:animated];

  [self.cameraView openCameraSession];
}

- (void)viewDidDisappear:(BOOL)animated {
  [self.cameraView closeCameraSession];
  
  [super viewDidDisappear:animated];
}

- (void)shutterTapped:(id)sender {
  NSLog(@"%s IN", __func__);
  [self.cameraView doCapture]; 

  // TODO: a shutter sound is needed here!! (see the sketch after this listing)
}

- (void)captureEnded:(CameraView *)cameraView {
  NSLog(@"%@", [[cameraView capturedImage] description]);

  // Display or otherwise process the captured image here.
}

- (void)dealloc {
  [self setCameraView:nil];
  [super dealloc];
}

@synthesize cameraView;

@end
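
About the shutter sound flagged in shutterTapped:, here is a minimal sketch, assuming AudioToolbox.framework is linked. System sound ID 1108 happens to be the built-in shutter sound, but that ID is undocumented, so registering your own sound file via AudioServicesCreateSystemSoundID is the safer route:

#import <AudioToolbox/AudioToolbox.h>

- (void)shutterTapped:(id)sender {
  NSLog(@"%s IN", __func__);
  AudioServicesPlaySystemSound(1108);  // undocumented built-in shutter sound
  [self.cameraView doCapture];
}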

Required frameworks

  • CoreMedia.framework
  • CoreVideo.framework
  • AVFoundation.framework
  • QuartzCore.framework (imported by CameraView.h for the layer work)

(AssetsLibrary.framework is also imported in CameraView.h but unused in this sample, so you can drop that import instead of linking it.)



References

iOS4プログラミングブック