Multimedia Programming: An iOS Camera Frame Capture Utility Class

This utility class provides a live camera preview, handles rotation automatically, and lets the caller grab frames actively, so the polling rate is chosen by the caller rather than being tied to the capture callback.

The system APIs mostly deliver frames asynchronously through callbacks; someone as particular as I am can't stand that, so the passive delivery model has to be turned into an active grab: the latest frame is buffered internally and handed out whenever the caller asks for it.


Header file:


#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

//All of these frame sizes use a 4:3 aspect ratio.
typedef enum TKVideoFrameSize
{
    tkVideoFrame480x360     = 480  << 16 | 360,
    tkVideoFrame720x540     = 720  << 16 | 540, //this resolution is much faster to process.
}TKVideoFrameSize;



@interface TKVideoCapture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>

- (bool) create:(UIView*)preview frame:(TKVideoFrameSize)type ;
- (bool) destory;

- (bool) start ;
- (bool) stop ;

//Returns image data in BGRA byte order (BGRA BGRA ...).
- (uint8_t*) get_image_rgb32:(uint32_t*)length ;

@end
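
A minimal usage sketch from the caller's side (assuming a view controller that owns a UIView named previewView; those names are illustrative and not part of the class):

//Illustrative caller code; previewView is assumed to be a UIView owned by the caller.
TKVideoCapture* capture = [[TKVideoCapture alloc] init];
[capture create:previewView frame:tkVideoFrame480x360]; //attach the preview layer and configure the session
[capture start];                                        //begin capturing frames

// ... poll frames with get_image_rgb32: while running ...

[capture stop];
[capture destory];  //tears down the session and frees the internal buffers
[capture release];  //the project uses manual reference counting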




Implementation file (the TKLock class used here comes from the previous article):


#import "TKVideoCapture.h"
#import <UIKit/UIKit.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import "TKLock.h"

@interface TKVideoCapture ()
{
    TKVideoFrameSize            _frametype      ;
    UIView*                     _preview        ;
    AVCaptureSession*           _captureSession ;
    AVCaptureVideoPreviewLayer* _capturePreview ;
    AVCaptureVideoDataOutput *  _captureOutput  ;
    AVCaptureDevice*            _captureDevice  ;
    AVCaptureDeviceInput*       _captureInput   ;
    
    uint8_t*                    _buffer_temp    ; //every captured frame is written into this buffer
    uint8_t*                    _buffer_obox    ; //copied from _buffer_temp when the caller requests a frame
    CGRect                      _subImageRect   ; //crop rectangle within the captured frame
    
    TKLock*                     _buffer_lock    ;
}

@end


@implementation TKVideoCapture

- (void) do_create
{
    self->_captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] ;
    self->_captureInput  = [AVCaptureDeviceInput deviceInputWithDevice:self->_captureDevice  error:nil];
    self->_captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    
    if(self->_captureOutput)
        [self->_captureOutput setAlwaysDiscardsLateVideoFrames:true];
    
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [self->_captureOutput setSampleBufferDelegate:self queue:queue];
    
    dispatch_release(queue);
    
    
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    
    [self->_captureOutput setVideoSettings:videoSettings];
    self->_captureSession = [[AVCaptureSession alloc] init];
    
    uint16_t width  = (uint16_t)(((uint32_t)_frametype) >> 16) ;
    uint16_t height = (uint16_t)(((uint32_t)_frametype) & 0xFFFF) ;
    
    _buffer_temp = (uint8_t*)malloc(width * height * 4);
    _buffer_obox = (uint8_t*)malloc(width * height * 4);
    
    //0.75 is the fixed crop ratio: the crop width is 0.75 x the preset height (e.g. 480 * 0.75 = 360), giving 4:3 after rotation.
    switch (_frametype) {
        case tkVideoFrame480x360:
        {
            _captureSession.sessionPreset = AVCaptureSessionPreset640x480 ;
            _subImageRect = CGRectMake((640-360)/2, 0, 360, 480);
            break;
        }
        case tkVideoFrame720x540:
        {
            _captureSession.sessionPreset = AVCaptureSessionPresetiFrame1280x720 ;
            _subImageRect = CGRectMake((1280-540)/2, 0, 540, 720);
            break;
        }
        default:
            break;
    }
    
    if(self->_captureInput != nil)
        [self->_captureSession addInput:self->_captureInput];
    
    [self->_captureSession addOutput:self->_captureOutput];
    
    self->_capturePreview = [AVCaptureVideoPreviewLayer layerWithSession: self->_captureSession];
    self->_capturePreview.frame = self->_preview.bounds;
    self->_capturePreview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self->_capturePreview.connection.videoOrientation = [self getOrientation] ;
    
    [self->_preview.layer addSublayer: self->_capturePreview];
    
    _buffer_lock = [[TKLock alloc] init];
    [_buffer_lock open];
}

- (bool) create:(UIView*)preview frame:(TKVideoFrameSize)type
{
    self->_frametype     = type ;
    self->_preview       = preview ;
    
    [self performSelectorOnMainThread:@selector(do_create) withObject:self waitUntilDone:true];
    
    return true ;
}

- (AVCaptureVideoOrientation) getOrientation
{
    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation ;
    switch(orientation)
    {
        case UIInterfaceOrientationPortrait: return AVCaptureVideoOrientationPortrait;
        case UIInterfaceOrientationPortraitUpsideDown: return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIInterfaceOrientationLandscapeLeft: return AVCaptureVideoOrientationLandscapeLeft;
        case UIInterfaceOrientationLandscapeRight: return AVCaptureVideoOrientationLandscapeRight;
        default: break;
    }
    return AVCaptureVideoOrientationLandscapeLeft ;
}

- (void) do_destory
{
    [_buffer_lock close];
    [_buffer_lock release];
    _buffer_lock = nil ;
    
    free(_buffer_temp);
    free(_buffer_obox);
    _buffer_temp = NULL ;
    _buffer_obox = NULL ;
    
    [self->_captureSession stopRunning];
    [self->_capturePreview removeFromSuperlayer];
    [self->_captureOutput  release];
    [self->_captureSession release];
    self->_captureSession = nil ;
    self->_capturePreview = nil ;
    self->_captureOutput  = nil ;
    self->_captureDevice  = nil ;
    self->_captureInput   = nil ;
    self->_preview        = nil ;
}

- (bool) destory
{
    [self performSelectorOnMainThread:@selector(do_destory) withObject:self waitUntilDone:true];
    return true ;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);
    uint8_t* baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    
    size_t width  = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, bytesPerRow * height, NULL);
    
    CGImageRef imageRef = CGImageCreate(width, height, 8, 32, bytesPerRow, colorSpace,
                                        kCGBitmapByteOrder32Little|kCGImageAlphaPremultipliedFirst,
                                        provider, NULL, false, kCGRenderingIntentDefault);
    
    CGImageRef subImageRef = CGImageCreateWithImageInRect(imageRef, _subImageRect);
    
    //The frame is rotated 90 degrees below, so the output width/height are the crop rect's height/width.
    size_t subWidth  = _subImageRect.size.height ;
    size_t subHeight = _subImageRect.size.width  ;
    
    //Pass an explicit bytesPerRow so rows are tightly packed and the memcpy
    //below copies exactly subWidth * subHeight * 4 bytes.
    CGContextRef context = CGBitmapContextCreate(NULL, subWidth, subHeight,
                                  CGImageGetBitsPerComponent(subImageRef), subWidth * 4,
                                  CGImageGetColorSpace(subImageRef),
                                  CGImageGetBitmapInfo(subImageRef));
    

    CGContextTranslateCTM(context, 0, subHeight);
    CGContextRotateCTM(context, -M_PI/2);

    CGContextDrawImage(context, CGRectMake(0, 0, subHeight, subWidth), subImageRef);

    uint8_t* data = (uint8_t*)CGBitmapContextGetData(context);
    
    [_buffer_lock lock];
    memcpy(_buffer_temp, data, subWidth * subHeight * 4);
    [_buffer_lock unlock];
    
    CGContextRelease(context);
    CGImageRelease(imageRef);
    CGImageRelease(subImageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}

- (void) do_start
{
    [self->_captureSession startRunning];
}

- (void) do_stop
{
    [self->_captureSession stopRunning];
}

- (bool) start
{
    [self performSelectorOnMainThread:@selector(do_start) withObject:self waitUntilDone:true];
    return true ;
}
- (bool) stop
{
    [self performSelectorOnMainThread:@selector(do_stop) withObject:self waitUntilDone:true];
    return true ;
}

- (uint8_t*) get_image_rgb32:(uint32_t*)length
{
    uint16_t width  = (uint16_t)(((uint32_t)_frametype) >> 16) ;
    uint16_t height = (uint16_t)(((uint32_t)_frametype) & 0xFFFF) ;
    
    //Copy the latest camera frame out of the shared capture buffer.
    [_buffer_lock lock];
    memcpy(_buffer_obox, _buffer_temp, width * height * 4);
    [_buffer_lock unlock];
    
    if(length)
        *length = width * height * 4 ;
    
    return _buffer_obox ;
}


@end
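
To show the active-pull model in practice, here is a hedged sketch of caller-side polling: a hypothetical grabFrame: method (not part of the utility class) reads the latest BGRA frame at whatever rate the caller chooses and wraps a copy of it in a UIImage. The _capture ivar, the NSTimer wiring, and the 480x360 dimensions (which assume tkVideoFrame480x360 was passed to create:frame:) are all assumptions for illustration.

//Hypothetical caller-side code; _capture is assumed to be a TKVideoCapture
//created with tkVideoFrame480x360 and already started.
- (void) grabFrame:(NSTimer*)timer
{
    uint32_t length = 0;
    uint8_t* bgra = [_capture get_image_rgb32:&length]; //latest frame, BGRA byte order
    if (bgra == NULL || length == 0)
        return;

    size_t width  = 480;            //matches tkVideoFrame480x360
    size_t height = 360;
    size_t bytesPerRow = width * 4; //tightly packed BGRA rows

    //Copy the bytes out of the shared buffer before wrapping them in a CGImage,
    //because the buffer is overwritten on the next call to get_image_rgb32:.
    CFDataRef data = CFDataCreate(kCFAllocatorDefault, bgra, (CFIndex)length);
    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef imageRef = CGImageCreate(width, height, 8, 32, bytesPerRow, colorSpace,
                                        kCGBitmapByteOrder32Little|kCGImageAlphaPremultipliedFirst,
                                        provider, NULL, false, kCGRenderingIntentDefault);

    UIImage* image = [UIImage imageWithCGImage:imageRef];
    //... display the image or hand it to an encoder ...

    CGImageRelease(imageRef);
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(provider);
    CFRelease(data);
}

//After [_capture start], schedule the poll at whatever rate the caller wants, e.g. 30 fps:
//[NSTimer scheduledTimerWithTimeInterval:1.0/30.0 target:self
//                               selector:@selector(grabFrame:) userInfo:nil repeats:YES];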


