일 | 월 | 화 | 수 | 목 | 금 | 토 |
---|---|---|---|---|---|---|
| | | 1 | 2 | 3 | 4 |
5 | 6 | 7 | 8 | 9 | 10 | 11 |
12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 | 21 | 22 | 23 | 24 | 25 |
26 | 27 | 28 | 29 | 30 | 31 |
- embedd
- V8 Engine
- java
- 안드로이드
- android log dump
- IMAGE
- appbarlayout
- FlexiblePageView
- sha1 convert hashkey
- Push
- ios framework
- 인증서 정보 뽑아내기
- Android
- ios
- IOS10
- JavaScript Engine
- v8 engine xcode build
- so file
- 공인인증서 만료일
- apache
- 공인인증서 정보
- Magnify Anim
- Objective C
- PageControl
- Android NDK시스템
- Google V8 Engine
- SO 파일
- apk 다운사이징
- apns
- Status Bar
- Today
- Total
caTea 블로그
[ios, objective-c] AVCaptureSession (실시간 카메라 이미지 캡쳐) 본문
ViewController.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h> // AVFoundation framework — add it to the project/target.

// View controller demonstrating live camera frame capture with
// AVCaptureSession; frames are delivered via
// AVCaptureVideoDataOutputSampleBufferDelegate.
//
// NOTE(review): the explicit ivar block below duplicates the @property
// declarations — the @synthesize statements in the .m bind each property to
// the matching ivar. Modern code would drop the ivars and rely on
// auto-synthesis. Also, assigning a picker's delegate would normally require
// UINavigationControllerDelegate conformance too; the delegate assignment is
// commented out in the .m, so this is currently harmless.
@interface ViewController : UIViewController<UIImagePickerControllerDelegate,AVCaptureVideoDataOutputSampleBufferDelegate>{
NSTimer *timer;                              // repeating 2-second timer driving -snapshot
UIView *cameraView;                          // hosts the camera preview / picker overlay
dispatch_queue_t sampleBufferCallbackQueue;  // serial queue for per-frame callbacks
AVCaptureVideoDataOutput *output;            // per-frame BGRA video output
AVCaptureSession *session;                   // the capture pipeline
}
@property (nonatomic, strong) NSTimer *timer;
@property (nonatomic, strong) UIView *cameraView;
@property(nonatomic, strong) AVCaptureSession *session;
@property(nonatomic, readonly) dispatch_queue_t sampleBufferCallbackQueue;
@property(nonatomic, readonly) AVCaptureVideoDataOutput *output;
// Presents a UIImagePickerController-based camera UI (separate demo path).
- (IBAction)showCamera:(id)sender;
@end
ViewController.m
#import "ViewController.h"
static int count=0; // timer ticks so far; -snapshot stops everything at 7 (~14 s)
static BOOL dispatchFlag = false; // set once the sample-buffer delegate has been attached
// Private class extension (no additional private API declared).
@interface ViewController ()
@end
@implementation ViewController
@synthesize timer;
@synthesize cameraView;
@synthesize sampleBufferCallbackQueue;
@synthesize output;
@synthesize session;

#pragma mark - Lifecycle

// Builds the capture pipeline (session -> camera input -> 32BGRA video data
// output) plus a full-screen preview layer, then starts a 2-second repeating
// timer that drives -snapshot.
- (void)viewDidLoad {
    [super viewDidLoad];

    // Capture session tuned for still-photo quality frames.
    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetPhoto;

    // Default video device; nil when no camera is available (e.g. simulator).
    AVCaptureDevice *device =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Input
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
    if (!input) {
        // BUGFIX: the original logged and then still called -addInput: with
        // nil, which raises NSInvalidArgumentException. Bail out instead.
        NSLog(@"No Input");
        return;
    }
    [session addInput:input];

    // Output: deliver frames as 32BGRA pixel buffers, matching the bitmap
    // layout assumed by -imageFromSampleBuffer:.
    output = [[AVCaptureVideoDataOutput alloc] init];
    output.videoSettings =
        @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    [session addOutput:output];

    // Full-screen live preview.
    AVCaptureVideoPreviewLayer *previewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    cameraView = self.view;
    previewLayer.frame = cameraView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:previewLayer];

    // NOTE: a repeating NSTimer retains its target; -snapshot invalidates it
    // after 7 ticks, which releases self again.
    timer = [NSTimer scheduledTimerWithTimeInterval:2
                                             target:self
                                           selector:@selector(snapshot)
                                           userInfo:nil
                                            repeats:YES];

    // Start capture session
    [session startRunning];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

// Called for every captured frame (on sampleBufferCallbackQueue) once the
// delegate has been attached in -snapshot. The frame image arrives here;
// process it however you like.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    UIImage *frameImage = [self imageFromSampleBuffer:sampleBuffer];
    (void)frameImage; // placeholder until real frame processing is added
}

// Creates a UIImage from a 32BGRA sample buffer. Returns nil when the buffer
// carries no image data. The result is rotated right so portrait-mode frames
// display upright.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Get the sample buffer's Core Video image buffer for the media data.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        // BUGFIX: guard against non-video sample buffers instead of passing
        // NULL into the CVPixelBuffer calls below.
        return nil;
    }

    // Lock the base address while CPU code reads the pixels.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // BGRA little-endian, alpha first — matches kCVPixelFormatType_32BGRA.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
        bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Rotate right: raw camera frames arrive in landscape orientation.
    UIImage *image = [UIImage imageWithCGImage:quartzImage
                                         scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(quartzImage);
    return image;
}

#pragma mark - Timer

// Timer callback (every 2 s). On the first tick it attaches the sample-buffer
// delegate, which starts frame delivery to -captureOutput:… at the camera's
// native frame rate; after 7 ticks (~14 s) it stops the timer and the
// session. Always returns nil — the return value is unused by the timer.
- (UIImage *)snapshot {
    if (dispatchFlag == false) {
        // Attaching the delegate starts per-frame callbacks on this serial
        // queue.
        sampleBufferCallbackQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
        [output setSampleBufferDelegate:self queue:sampleBufferCallbackQueue];
        dispatchFlag = true;
    }

    count = count + 1;
    if (count == 7) { // auto-stop after 7 ticks (~14 seconds)
        [timer invalidate];
        [session stopRunning];
        // NOTE(review): UIAlertView is deprecated since iOS 8; migrate to
        // UIAlertController when the deployment target allows.
        UIAlertView *endMessage = [[UIAlertView alloc] initWithTitle:@"End" message:@"Timer end!!!!" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
        [endMessage show];
    }
    return nil;
}

#pragma mark - Memory

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

#pragma mark - Actions

// Presents a UIImagePickerController-based camera with a bare overlay view
// and no default controls. (Separate demo path, unused by the
// AVCaptureSession flow above.)
- (IBAction)showCamera:(id)sender {
    cameraView = [[UIView alloc] init];
    UIImagePickerController *cameraController = [[UIImagePickerController alloc] init];
    cameraController.sourceType = UIImagePickerControllerSourceTypeCamera;
    cameraView.frame = cameraController.cameraOverlayView.frame;
    cameraController.cameraOverlayView = cameraView;
    cameraController.allowsEditing = NO;
    cameraController.showsCameraControls = NO;
    // BUGFIX: -presentModalViewController:animated: has been deprecated since
    // iOS 6; use the modern presentation API instead.
    [self presentViewController:cameraController animated:YES completion:nil];
}
@end
- 코드가 많이 더럽습니다;;ㅋㅋ 회사라 정리할 시간이없어요 몰래 ;; ㅋㅋ
'macos | ios' 카테고리의 다른 글
NSURLSession 간편하게 사용하기 (2) | 2016.09.04 |
---|---|
디바이스 타입 가져오기 (0) | 2016.07.12 |
ios json을 NSDictionary로 또는 그반대로 변경 함수 (0) | 2015.06.17 |
[ios, objective-c] 자바의 onActivityResult 기능을 구현해보자 (0) | 2015.02.10 |
[ios, Objectiv_C]ABPeoplePickerNavigationController Image 가져오기 (0) | 2015.01.22 |