caTea 블로그

[ios, objective-c] AVCaptureSession (실시간 카메라 이미지 캡쳐) 본문

macos | ios

[ios, objective-c] AVCaptureSession (실시간 카메라 이미지 캡쳐)

ZaRas 2015. 1. 19. 16:29
반응형

ViewController.h


#import <UIKit/UIKit.h>

#import <AVFoundation/AVFoundation.h>   // AVFoundation framework — add it to the target.


/// Shows a live camera preview and periodically hooks up an
/// AVCaptureVideoDataOutput so raw frames can be processed as UIImages.
///
/// Note: the backing ivars are synthesized (via @synthesize in the .m),
/// so no explicit ivar block is needed here.
@interface ViewController : UIViewController <UIImagePickerControllerDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>

/// Fires every 2 seconds to drive -snapshot.
/// NOTE(review): a repeating NSTimer retains its target; invalidate it
/// before this controller can be deallocated.
@property (nonatomic, strong) NSTimer *timer;

/// Container view used as the preview / camera-overlay surface.
@property (nonatomic, strong) UIView *cameraView;

/// The capture session driving the camera.
@property (nonatomic, strong) AVCaptureSession *session;

/// Serial queue on which sample-buffer delegate callbacks are delivered.
@property (nonatomic, readonly) dispatch_queue_t sampleBufferCallbackQueue;

/// Video data output vending raw 32BGRA frames.
@property (nonatomic, readonly) AVCaptureVideoDataOutput *output;

/// Presents a UIImagePickerController-based camera UI.
- (IBAction)showCamera:(id)sender;

@end


ViewController.m


#import "ViewController.h"


/// Number of -snapshot timer ticks so far (2 s apart); the session is
/// stopped when this reaches 7 (~14 s).
static int count = 0;

/// Guards one-time installation of the sample-buffer delegate + queue.
/// Idiomatic BOOL literal is NO, not the C99 `false`.
static BOOL dispatchFlag = NO;

// Class extension reserved for private declarations (currently empty).
@interface ViewController ()


@end


@implementation ViewController

@synthesize timer;

@synthesize cameraView;

@synthesize sampleBufferCallbackQueue;

@synthesize output;

@synthesize session;

#pragma mark - Lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];

    // Configure the capture session for photo-quality frames.
    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetPhoto;

    // Default (back) camera.
    AVCaptureDevice *device =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Wrap the device in an input. The original passed error:nil and then
    // added the input unconditionally — AVCaptureSession raises on a nil
    // or unaddable input, so guard both.
    NSError *inputError = nil;
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:device error:&inputError];
    if (!input) {
        NSLog(@"No Input");
        NSLog(@"deviceInput error: %@", inputError);
    } else if ([session canAddInput:input]) {
        [session addInput:input];
    }

    // Video data output vending 32BGRA pixel buffers — this pixel format
    // matches the byte order assumed by -imageFromSampleBuffer:.
    output = [[AVCaptureVideoDataOutput alloc] init];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }
    output.videoSettings =
        @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };

    // Live preview filling the whole view.
    AVCaptureVideoPreviewLayer *previewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    cameraView = self.view;
    previewLayer.frame = cameraView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:previewLayer];

    // Drive -snapshot every 2 seconds.
    // NOTE(review): a repeating NSTimer retains its target, so this
    // controller cannot be deallocated until the timer is invalidated
    // (done in -snapshot after 7 ticks).
    timer = [NSTimer scheduledTimerWithTimeInterval:2
                                             target:self
                                           selector:@selector(snapshot)
                                           userInfo:nil
                                            repeats:YES];

    // Start the capture session. (startRunning can block briefly;
    // consider moving it off the main thread.)
    [session startRunning];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

// Called on sampleBufferCallbackQueue once per captured frame (at the
// camera's frame rate). Process the frame image here as needed.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    UIImage *image1 = [self imageFromSampleBuffer:sampleBuffer];
    (void)image1; // Placeholder: the converted frame is not consumed yet.
}

#pragma mark - Frame conversion

/// Converts a 32BGRA sample buffer into a UIImage (rotated to Right
/// orientation to match the portrait camera).
/// Returns nil if the buffer carries no image data.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Get the sample buffer's Core Video image buffer.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        // Dropped or non-video buffer — nothing to convert.
        return nil;
    }

    // Lock the base address so the pixel data can be read directly.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Device-dependent RGB color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Bitmap context over the pixel data; the byte-order/alpha flags
    // assume the 32BGRA format configured on the output.
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Snapshot the context into a CGImage, then release CG/CV resources.
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Rotate to match portrait capture orientation.
    UIImage *image = [UIImage imageWithCGImage:quartzImage
                                         scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(quartzImage);

    return image;
}

#pragma mark - Timer

/// Timer callback: on the first tick, installs the sample-buffer delegate
/// (which starts per-frame callbacks); after 7 ticks (~14 s at a 2 s
/// interval) it stops the timer and the session.
/// Always returns nil — the screen-capture path is commented out below.
- (UIImage *)snapshot {
    if (!dispatchFlag) {
        // From here on, -captureOutput:didOutputSampleBuffer:fromConnection:
        // fires at the camera frame rate on this serial queue.
        sampleBufferCallbackQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
        [output setSampleBufferDelegate:self queue:sampleBufferCallbackQueue];
        dispatchFlag = true;
    }

    count = count + 1;

    if (count == 7) { // Auto-stop after ~14 seconds.
        [timer invalidate];
        [session stopRunning];
        // NOTE(review): UIAlertView is deprecated since iOS 8; migrate to
        // UIAlertController when the deployment target allows.
        UIAlertView *endMessage = [[UIAlertView alloc] initWithTitle:@"End" message:@"Timer end!!!!" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
        [endMessage show];
    }

    // Alternative: capture the on-screen view hierarchy instead of the
    // raw camera frames.
//    UIGraphicsBeginImageContextWithOptions(cameraView.bounds.size, YES, 0);
//    [cameraView drawViewHierarchyInRect:cameraView.bounds afterScreenUpdates:YES];
//    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
//    UIGraphicsEndImageContext();
    UIImage *image = nil;
    return image;
}

#pragma mark - Actions

/// Presents a camera picker with a custom (empty) overlay view and the
/// built-in controls hidden.
- (IBAction)showCamera:(id)sender {
    cameraView = [[UIView alloc] init];

    UIImagePickerController *cameraController = [[UIImagePickerController alloc] init];
    cameraController.sourceType = UIImagePickerControllerSourceTypeCamera;

    cameraView.frame = cameraController.cameraOverlayView.frame;
    cameraController.cameraOverlayView = cameraView;

    //cameraController.delegate = (id)self;
    cameraController.allowsEditing = NO;
    cameraController.showsCameraControls = NO;
    // cameraController.view.userInteractionEnabled = NO;

    // presentModalViewController:animated: is deprecated/removed; use the
    // iOS 5+ replacement with an empty completion.
    [self presentViewController:cameraController animated:YES completion:nil];
}

@end




- 코드가 많이 더럽습니다;;ㅋㅋ 회사라 정리할 시간이없어요 몰래 ;; ㅋㅋ


728x90