iOS Example: SquareCam Analysis

// used for KVO observation of the @"capturingStillImage" property to perform flash bulb animation  
static const NSString *AVCaptureStillImageIsCapturingStillImageContext = @"AVCaptureStillImageIsCapturingStillImageContext"; 
 
static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;}; 
 
static void ReleaseCVPixelBuffer(void *pixel, const void *data, size_t size); 
static void ReleaseCVPixelBuffer(void *pixel, const void *data, size_t size)  
{ 
    /**
     CVPixelBuffer (Core Video pixel buffer): an image buffer in main memory that holds pixel data.
     Applications use one when generating image frames, decompressing video, or working with Core Image.
     **/ 
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)pixel; 
    CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 ); 
    CVPixelBufferRelease( pixelBuffer ); 
} 
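
For reference, the usual lock/read/unlock pattern for touching a CVPixelBuffer's bytes directly looks like the sketch below (illustrative only, not part of the sample, and it assumes a 32-bit BGRA buffer):

    CVPixelBufferLockBaseAddress( pixelBuffer, 0 ); 
    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress( pixelBuffer ); 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow( pixelBuffer ); 
    // pixel (x, y) starts at base[y * bytesPerRow + 4 * x]; the bytes are ordered B, G, R, A 
    CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 ); 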

 
 
//Create a CGImage from the given pixel buffer; the buffer must use the uncompressed kCVPixelFormatType_32ARGB or kCVPixelFormatType_32BGRA format  
static OSStatus CreateCGImageFromCVPixelBuffer(CVPixelBufferRef pixelBuffer, CGImageRef *imageOut); 
static OSStatus CreateCGImageFromCVPixelBuffer(CVPixelBufferRef pixelBuffer, CGImageRef *imageOut)  
{    
    OSStatus err = noErr; 
    OSType sourcePixelFormat; 
    size_t width, height, sourceRowBytes; 
    void *sourceBaseAddr = NULL; 
    CGBitmapInfo bitmapInfo; 
    CGColorSpaceRef colorspace = NULL; 
     
    //Some programs give Quartz data in blocks instead of having Quartz read it from memory; CGDataProviderRef lets callers supply data to Quartz that way (essentially a Quartz-defined data wrapper)  
    CGDataProviderRef provider = NULL; 
    CGImageRef image = NULL; 
     
    sourcePixelFormat = CVPixelBufferGetPixelFormatType( pixelBuffer ); 
    if ( kCVPixelFormatType_32ARGB == sourcePixelFormat ) 
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipFirst; 
    else if ( kCVPixelFormatType_32BGRA == sourcePixelFormat ) 
        bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst; 
    else 
        return -95014; // only uncompressed pixel formats  
     
     
    //Read the layout attributes of the pixel buffer  
    sourceRowBytes = CVPixelBufferGetBytesPerRow( pixelBuffer ); 
    width = CVPixelBufferGetWidth( pixelBuffer ); 
    height = CVPixelBufferGetHeight( pixelBuffer ); 
     
    CVPixelBufferLockBaseAddress( pixelBuffer, 0 ); 
     
    //Get the base (starting) address of the pixel buffer, i.e. direct access to its memory  
    sourceBaseAddr = CVPixelBufferGetBaseAddress( pixelBuffer ); 
     
    colorspace = CGColorSpaceCreateDeviceRGB(); 
     
    CVPixelBufferRetain( pixelBuffer ); 
     
    //Expose the data to Quartz for direct access: supply the base address, the length, and a release callback  
    provider = CGDataProviderCreateWithData( (void *)pixelBuffer, sourceBaseAddr, sourceRowBytes * height, ReleaseCVPixelBuffer); 
    image = CGImageCreate(width, height, 8, 32, sourceRowBytes, colorspace, bitmapInfo, provider, NULL, true, kCGRenderingIntentDefault); 
     
bail: 
    if ( err && image ) { 
        CGImageRelease( image ); 
        image = NULL; 
    } 
    if ( provider ) CGDataProviderRelease( provider ); 
    if ( colorspace ) CGColorSpaceRelease( colorspace ); 
    *imageOut = image; 
    return err; 
} 

 
// utility used by newSquareOverlayedImageForFeatures to create the bitmap drawing context  
static CGContextRef CreateCGBitmapContextForSize(CGSize size); 
static CGContextRef CreateCGBitmapContextForSize(CGSize size) 
{ 
    CGContextRef    context = NULL; 
    CGColorSpaceRef colorSpace; 
    int             bitmapBytesPerRow; 
     
    bitmapBytesPerRow = (size.width * 4); 
     
    colorSpace = CGColorSpaceCreateDeviceRGB(); 
    context = CGBitmapContextCreate (NULL, 
                                     size.width, 
                                     size.height, 
                                     8,      // bits per component  
                                     bitmapBytesPerRow, 
                                     colorSpace, 
                                     kCGImageAlphaPremultipliedLast); 
    CGContextSetAllowsAntialiasing(context, NO); 
    CGColorSpaceRelease( colorSpace ); 
    return context; 
} 

 
#pragma mark-  
 
@interface UIImage (RotationMethods) 
- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees; 
@end 
 
@implementation UIImage (RotationMethods) 
 
- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees 
{    
    // calculate the size of the rotated view's containing box for our drawing space  
    UIView *rotatedViewBox = [[UIView alloc] initWithFrame:CGRectMake(0,0,self.size.width, self.size.height)]; 
     
    //A CGAffineTransform is used here to rotate the coordinate system  
    CGAffineTransform t = CGAffineTransformMakeRotation(DegreesToRadians(degrees)); 
    rotatedViewBox.transform = t; 
    CGSize rotatedSize = rotatedViewBox.frame.size; 
    [rotatedViewBox release]; 
     
    // Create the bitmap context  
    UIGraphicsBeginImageContext(rotatedSize); //Create a bitmap graphics context and make it the current context  
    CGContextRef bitmap = UIGraphicsGetCurrentContext();  
     
    // Move the origin to the middle of the image so we will rotate and scale around the center.  
    CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2); 
     
    // Rotate the image context  
    CGContextRotateCTM(bitmap, DegreesToRadians(degrees)); 
     
    // Now, draw the rotated/scaled image into the context  
    CGContextScaleCTM(bitmap, 1.0, -1.0); 
    CGContextDrawImage(bitmap, CGRectMake(-self.size.width / 2, -self.size.height / 2, self.size.width, self.size.height), [self CGImage]); 
     
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext(); 
    UIGraphicsEndImageContext(); 
    return newImage; 
} 
     

 
@end 
 
#pragma mark-  
 
@interface SquareCamViewController (InternalMethods) 
- (void)setupAVCapture; 
- (void)teardownAVCapture; 
- (void)drawFaceBoxesForFeatures:(NSArray *)features forVideoBox:(CGRect)clap orientation:(UIDeviceOrientation)orientation; 
@end 
 
@implementation SquareCamViewController 
 
- (void)setupAVCapture 
{ 
    NSError *error = nil; 
     
    AVCaptureSession *session = [AVCaptureSession new]; 
    //UIDevice is a singleton representing the current device; it exposes device information such as the assigned device name, model, OS name and version  
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) 
        [session setSessionPreset:AVCaptureSessionPreset640x480]; 
    else 
        [session setSessionPreset:AVCaptureSessionPresetPhoto]; 
     
    // Select a video device, make an input  
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error]; 
    require( error == nil, bail ); 
     
    isUsingFrontFacingCamera = NO; 
    if ( [session canAddInput:deviceInput] ) 
        [session addInput:deviceInput]; 
     
    //AVCaptureStillImageOutput, a subclass of AVCaptureOutput, is used to capture high-quality still images  
    stillImageOutput = [AVCaptureStillImageOutput new]; 
    [stillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:AVCaptureStillImageIsCapturingStillImageContext]; 
    if ( [session canAddOutput:stillImageOutput] ) 
        [session addOutput:stillImageOutput]; 
     
    // Make a video data output  
    videoDataOutput = [AVCaptureVideoDataOutput new]; 
     
    // we want BGRA, both CoreGraphics and OpenGL work well with 'BGRA'  
    NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject: 
                                       [NSNumber numberWithInt:kCMPixelFormat_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 
    [videoDataOutput setVideoSettings:rgbOutputSettings]; 
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES]; // discard if the data output queue is blocked (as we process the still image)  
  
    //A serial dispatch_queue_t so the AVCaptureVideoDataOutput delivers sample buffers in FIFO order: every frame reaches captureOutput:didOutputSampleBuffer:fromConnection: in sequence, and late frames can be dropped in favor of newer ones without frames ever arriving out of order  
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL); 
    [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue]; 
     
    if ( [session canAddOutput:videoDataOutput] ) 
        [session addOutput:videoDataOutput]; 
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:NO]; 
     
    effectiveScale = 1.0; 
    //Create a preview layer for the given session  
    previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session]; 
    [previewLayer setBackgroundColor:[[UIColor blackColor] CGColor]]; 
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect]; 
    CALayer *rootLayer = [previewView layer]; 
    [rootLayer setMasksToBounds:YES]; 
    [previewLayer setFrame:[rootLayer bounds]]; 
    [rootLayer addSublayer:previewLayer]; //The preview layer is added on top of the root layer's existing sublayers (sublayers stack in the order they are added)  
    [session startRunning]; 
     
     
//bail: is reached by normal fall-through and also when require(error == nil, bail) fails, e.g. when creating the device input returned an error  
bail: 
    [session release]; 
    if (error) { 
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"Failed with error %d", (int)[error code]] 
                                                            message:[error localizedDescription] 
                                                           delegate:nil  
                                                  cancelButtonTitle:@"Dismiss"  
                                                  otherButtonTitles:nil]; 
        [alertView show]; 
        [alertView release]; 
        [self teardownAVCapture]; 
    } 
} 
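
The require(condition, label) macro used above, and again in writeCGImageToCameraRoll: below, comes from Apple's <AssertMacros.h>: when the condition is false it jumps to the named label, which is why both methods end with a bail: label holding the shared cleanup path. A simplified sketch of the macro (the real one also logs an assertion message in debug builds):

    #define require(assertion, exceptionLabel) \
        do { if ( __builtin_expect(!(assertion), 0) ) goto exceptionLabel; } while ( 0 ) 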

 
// clean up capture setup  
- (void)teardownAVCapture 
{ 
    [videoDataOutput release]; 
    if (videoDataOutputQueue) 
        dispatch_release(videoDataOutputQueue); 
    [stillImageOutput removeObserver:self forKeyPath:@"capturingStillImage"]; // must match the key path registered in setupAVCapture  
    [stillImageOutput release]; 
    [previewLayer removeFromSuperlayer]; 
    [previewLayer release]; 
} 

 
// perform a flash bulb animation using KVO to monitor the value of the capturingStillImage property of the AVCaptureStillImageOutput class  
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context 
{ 
    if ( context == AVCaptureStillImageIsCapturingStillImageContext ) { 
        BOOL isCapturingStillImage = [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; 
         
        if ( isCapturingStillImage ) { 
            // do flash bulb like animation  
            flashView = [[UIView alloc] initWithFrame:[previewView frame]]; 
            [flashView setBackgroundColor:[UIColor whiteColor]]; 
            [flashView setAlpha:0.f]; 
            [[[self view] window] addSubview:flashView]; 
             
            [UIView animateWithDuration:.4f 
                             animations:^{ 
                                 [flashView setAlpha:1.f]; 
                             } 
             ]; 
        } 
        else { 
            [UIView animateWithDuration:.4f 
                             animations:^{ 
                                 [flashView setAlpha:0.f]; 
                             } 
                             completion:^(BOOL finished){ 
                                 [flashView removeFromSuperview]; 
                                 [flashView release]; 
                                 flashView = nil; 
                             } 
             ]; 
        } 
    } 
} 

 
// utility routine used during image capture to set up capture orientation  
- (AVCaptureVideoOrientation)avOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation 
{ 
    AVCaptureVideoOrientation result = deviceOrientation; 
    if ( deviceOrientation == UIDeviceOrientationLandscapeLeft ) 
        result = AVCaptureVideoOrientationLandscapeRight; 
    else if ( deviceOrientation == UIDeviceOrientationLandscapeRight ) 
        result = AVCaptureVideoOrientationLandscapeLeft; 
    return result; 
} 

 
// utility routine to create a new image with the red square overlay with appropriate orientation  
// and return the new composited image which can be saved to the camera roll  
- (CGImageRef)newSquareOverlayedImageForFeatures:(NSArray *)features  
                                            inCGImage:(CGImageRef)backgroundImage  
                                      withOrientation:(UIDeviceOrientation)orientation  
                                          frontFacing:(BOOL)isFrontFacing 
{ 
    CGImageRef returnImage = NULL; 
    CGRect backgroundImageRect = CGRectMake(0., 0., CGImageGetWidth(backgroundImage), CGImageGetHeight(backgroundImage)); 
    CGContextRef bitmapContext = CreateCGBitmapContextForSize(backgroundImageRect.size); 
    CGContextClearRect(bitmapContext, backgroundImageRect); 
    CGContextDrawImage(bitmapContext, backgroundImageRect, backgroundImage); 
    CGFloat rotationDegrees = 0.; 
     
    switch (orientation) { 
        case UIDeviceOrientationPortrait: 
            rotationDegrees = -90.; 
            break; 
        case UIDeviceOrientationPortraitUpsideDown: 
            rotationDegrees = 90.; 
            break; 
        case UIDeviceOrientationLandscapeLeft: 
            if (isFrontFacing) rotationDegrees = 180.; 
            else rotationDegrees = 0.; 
            break; 
        case UIDeviceOrientationLandscapeRight: 
            if (isFrontFacing) rotationDegrees = 0.; 
            else rotationDegrees = 180.; 
            break; 
        case UIDeviceOrientationFaceUp: 
        case UIDeviceOrientationFaceDown: 
        default: 
            break; // leave the layer in its last known orientation  
    } 
    UIImage *rotatedSquareImage = [square imageRotatedByDegrees:rotationDegrees]; 
     
    // features found by the face detector  
    for ( CIFaceFeature *ff in features ) { 
        CGRect faceRect = [ff bounds]; 
        CGContextDrawImage(bitmapContext, faceRect, [rotatedSquareImage CGImage]); 
    } 
    returnImage = CGBitmapContextCreateImage(bitmapContext); 
    CGContextRelease (bitmapContext); 
     
    return returnImage; 
} 

 
// utility routine used after taking a still image to write the resulting image to the camera roll  
- (BOOL)writeCGImageToCameraRoll:(CGImageRef)cgImage withMetadata:(NSDictionary *)metadata 
{ 
    CFMutableDataRef destinationData = CFDataCreateMutable(kCFAllocatorDefault, 0); 
    CGImageDestinationRef destination = CGImageDestinationCreateWithData(destinationData,  
                                                                         CFSTR("public.jpeg"),  
                                                                         1,  
                                                                         NULL); 
    BOOL success = (destination != NULL); 
    require(success, bail); 
 
    const float JPEGCompQuality = 0.85f; // JPEGHigherQuality  
    CFMutableDictionaryRef optionsDict = NULL; 
    CFNumberRef qualityNum = NULL; 
     
    qualityNum = CFNumberCreate(0, kCFNumberFloatType, &JPEGCompQuality);     
    if ( qualityNum ) { 
        optionsDict = CFDictionaryCreateMutable(0, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); 
        if ( optionsDict ) 
            CFDictionarySetValue(optionsDict, kCGImageDestinationLossyCompressionQuality, qualityNum); 
        CFRelease( qualityNum ); 
    } 
     
    CGImageDestinationAddImage( destination, cgImage, optionsDict ); 
    success = CGImageDestinationFinalize( destination ); 
 
    if ( optionsDict ) 
        CFRelease(optionsDict); 
     
    require(success, bail); 
     
    CFRetain(destinationData); 
    ALAssetsLibrary *library = [ALAssetsLibrary new]; 
    [library writeImageDataToSavedPhotosAlbum:(id)destinationData metadata:metadata completionBlock:^(NSURL *assetURL, NSError *error) { 
        if (destinationData) 
            CFRelease(destinationData); 
    }]; 
    [library release]; 
 
//goto target: require(success, bail) jumps here whenever success is false  
bail: 
    if (destinationData) 
        CFRelease(destinationData); 
    if (destination) 
        CFRelease(destination); 
    return success; 
} 

 
// utility routine to display an error alert if takePicture fails  
- (void)displayErrorOnMainQueue:(NSError *)error withMessage:(NSString *)message 
{ 
    dispatch_async(dispatch_get_main_queue(), ^(void) { 
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"%@ (%d)", message, (int)[error code]] 
                                                            message:[error localizedDescription] 
                                                           delegate:nil  
                                                  cancelButtonTitle:@"Dismiss"  
                                                  otherButtonTitles:nil]; 
        [alertView show]; 
        [alertView release]; 
    }); 
} 

 
// turn on/off face detection  
- (IBAction)toggleFaceDetection:(id)sender 
{ 
    detectFaces = [(UISwitch *)sender isOn]; 
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:detectFaces]; 
    if (!detectFaces) { 
        dispatch_async(dispatch_get_main_queue(), ^(void) { 
            // clear out any squares currently displaying.  
            [self drawFaceBoxesForFeatures:[NSArray array] forVideoBox:CGRectZero orientation:UIDeviceOrientationPortrait]; 
        }); 
    } 
} 

 
// find where the video box is positioned within the preview layer based on the video size and gravity  
+ (CGRect)videoPreviewBoxForGravity:(NSString *)gravity frameSize:(CGSize)frameSize apertureSize:(CGSize)apertureSize 
{ 
    CGFloat apertureRatio = apertureSize.height / apertureSize.width; 
    CGFloat viewRatio = frameSize.width / frameSize.height; 
     
    CGSize size = CGSizeZero; 
    if ([gravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) { 
        if (viewRatio > apertureRatio) { 
            size.width = frameSize.width; 
            size.height = apertureSize.width * (frameSize.width / apertureSize.height); 
        } else { 
            size.width = apertureSize.height * (frameSize.height / apertureSize.width); 
            size.height = frameSize.height; 
        } 
    } else if ([gravity isEqualToString:AVLayerVideoGravityResizeAspect]) { 
        if (viewRatio > apertureRatio) { 
            size.width = apertureSize.height * (frameSize.height / apertureSize.width); 
            size.height = frameSize.height; 
        } else { 
            size.width = frameSize.width; 
            size.height = apertureSize.width * (frameSize.width / apertureSize.height); 
        } 
    } else if ([gravity isEqualToString:AVLayerVideoGravityResize]) { 
        size.width = frameSize.width; 
        size.height = frameSize.height; 
    } 
     
    CGRect videoBox; 
    videoBox.size = size; 
    if (size.width < frameSize.width) 
        videoBox.origin.x = (frameSize.width - size.width) / 2; 
    else 
        videoBox.origin.x = (size.width - frameSize.width) / 2; 
     
    if ( size.height < frameSize.height ) 
        videoBox.origin.y = (frameSize.height - size.height) / 2; 
    else 
        videoBox.origin.y = (size.height - frameSize.height) / 2; 
     
    return videoBox; 
} 
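
A quick worked example of the math above, with assumed numbers: for a 320x480 point preview view and the 640x480 clean aperture that AVCaptureSessionPreset640x480 produces, apertureRatio = 480/640 = 0.75 and viewRatio = 320/480 ≈ 0.667. With AVLayerVideoGravityResizeAspect the else branch runs, giving size = {320, 640 * (320/480)} ≈ {320, 426.7}, so the returned videoBox is roughly {0, 26.7, 320, 426.7}: the video is letterboxed inside the preview view, and the face rectangles later get offset by that origin.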

 
// called asynchronously as the capture output is capturing sample buffers, this method asks the face detector (if on)  
// to detect features and for each draw the red square in a layer and set appropriate orientation  
- (void)drawFaceBoxesForFeatures:(NSArray *)features forVideoBox:(CGRect)clap orientation:(UIDeviceOrientation)orientation 
{ 
    NSArray *sublayers = [NSArray arrayWithArray:[previewLayer sublayers]]; 
    NSInteger sublayersCount = [sublayers count], currentSublayer = 0; 
    NSInteger featuresCount = [features count], currentFeature = 0; 
     
    //CATransaction is the Core Animation object that batches layer tree changes into atomic updates of the render tree; every operation on the layer tree is part of a transaction, and transactions can be nested  
    [CATransaction begin]; 
     
    //kCFBooleanTrue is Core Foundation's boxed boolean (Core Foundation provides basic services such as common data type abstractions, property lists, URL access and preferences); setting it for kCATransactionDisableActions turns off implicit animations inside this transaction  
    [CATransaction setValue:(id)kCFBooleanTrue forKey:kCATransactionDisableActions]; 
     
    // hide all the face layers  
    for ( CALayer *layer in sublayers ) { 
        if ( [[layer name] isEqualToString:@"FaceLayer"] ) 
            [layer setHidden:YES]; 
    }    
     
    if ( featuresCount == 0 || !detectFaces ) { 
         
        [CATransaction commit]; 
        return; // early bail.  
    } 
         
    CGSize parentFrameSize = [previewView frame].size; 
    NSString *gravity = [previewLayer videoGravity]; 
    BOOL isMirrored = [previewLayer isMirrored]; 
    CGRect previewBox = [SquareCamViewController videoPreviewBoxForGravity:gravity  
                                                               frameSize:parentFrameSize  
                                                            apertureSize:clap.size]; 
     
    for ( CIFaceFeature *ff in features ) { 
        // find the correct position for the square layer within the previewLayer  
        // the feature box originates in the bottom left of the video frame.  
        // (Bottom right if mirroring is turned on)  
        CGRect faceRect = [ff bounds]; 
 
        // flip preview width and height  
        CGFloat temp = faceRect.size.width; 
        faceRect.size.width = faceRect.size.height; 
        faceRect.size.height = temp; 
        temp = faceRect.origin.x; 
        faceRect.origin.x = faceRect.origin.y; 
        faceRect.origin.y = temp; 
        // scale coordinates so they fit in the preview box, which may be scaled  
        CGFloat widthScaleBy = previewBox.size.width / clap.size.height; 
        CGFloat heightScaleBy = previewBox.size.height / clap.size.width; 
        faceRect.size.width *= widthScaleBy; 
        faceRect.size.height *= heightScaleBy; 
        faceRect.origin.x *= widthScaleBy; 
        faceRect.origin.y *= heightScaleBy; 
 
        if ( isMirrored ) 
            faceRect = CGRectOffset(faceRect, previewBox.origin.x + previewBox.size.width - faceRect.size.width - (faceRect.origin.x * 2), previewBox.origin.y); 
        else 
            faceRect = CGRectOffset(faceRect, previewBox.origin.x, previewBox.origin.y); 
         
        CALayer *featureLayer = nil; 
         
        // re-use an existing layer if possible  
        while ( !featureLayer && (currentSublayer < sublayersCount) ) { 
            CALayer *currentLayer = [sublayers objectAtIndex:currentSublayer++]; 
            if ( [[currentLayer name] isEqualToString:@"FaceLayer"] ) { 
                featureLayer = currentLayer; 
                [currentLayer setHidden:NO]; 
            } 
        } 
         
        // create a new one if necessary  
        if ( !featureLayer ) { 
            featureLayer = [CALayer new]; 
            [featureLayer setContents:(id)[square CGImage]]; 
            [featureLayer setName:@"FaceLayer"]; 
            [previewLayer addSublayer:featureLayer]; 
            [featureLayer release]; 
        } 
        [featureLayer setFrame:faceRect]; 
         
        switch (orientation) { 
            case UIDeviceOrientationPortrait: 
                [featureLayer setAffineTransform:CGAffineTransformMakeRotation(DegreesToRadians(0.))]; 
                break; 
            case UIDeviceOrientationPortraitUpsideDown: 
                [featureLayer setAffineTransform:CGAffineTransformMakeRotation(DegreesToRadians(180.))]; 
                break; 
            case UIDeviceOrientationLandscapeLeft: 
                [featureLayer setAffineTransform:CGAffineTransformMakeRotation(DegreesToRadians(90.))]; 
                break; 
            case UIDeviceOrientationLandscapeRight: 
                [featureLayer setAffineTransform:CGAffineTransformMakeRotation(DegreesToRadians(-90.))]; 
                break; 
            case UIDeviceOrientationFaceUp: 
            case UIDeviceOrientationFaceDown: 
            default: 
                break; // leave the layer in its last known orientation  
        } 
        currentFeature++; 
    } 
     
    [CATransaction commit]; 
} 
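
Continuing the worked example from videoPreviewBoxForGravity with assumed numbers: a face that Core Image reports at {100, 100, 200, 200} in the 640x480 buffer first has its width/height and x/y swapped (the buffer is landscape while the portrait preview is not), is then scaled by widthScaleBy = 320/480 ≈ 0.667 and heightScaleBy = 426.7/640 ≈ 0.667 to about {66.7, 66.7, 133.3, 133.3}, and is finally offset by the previewBox origin before being assigned to the red square layer's frame.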

 
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{    
    // Real-time processing of the frame to be analyzed  
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
 
    //CMAttachmentBearer is a CF-based protocol providing a key/value/mode attachment API; any CF object can be attached to a bearer to carry extra information  
    CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate); 
    CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(NSDictionary *)attachments]; 
    if (attachments) 
        CFRelease(attachments); 
    NSDictionary *imageOptions = nil; 
    UIDeviceOrientation curDeviceOrientation = [[UIDevice currentDevice] orientation]; 
    int exifOrientation; 
     
    /* kCGImagePropertyOrientation values
        The intended display orientation of the image. If present, this key is a CFNumber value with the same value as defined
        by the TIFF and EXIF specifications -- see enumeration of integer constants. 
        The value specified where the origin (0,0) of the image is located. If not present, a value of 1 is assumed.
        
        used when calling featuresInImage: options: The value for this key is an integer NSNumber from 1..8 as found in kCGImagePropertyOrientation.
        If present, the detection will be done based on that orientation but the coordinates in the returned features will still be based on those of the image. */ 
         
    enum { 
        PHOTOS_EXIF_0ROW_TOP_0COL_LEFT          = 1, //   1  =  0th row is at the top, and 0th column is on the left (THE DEFAULT).  
        PHOTOS_EXIF_0ROW_TOP_0COL_RIGHT         = 2, //   2  =  0th row is at the top, and 0th column is on the right.    
        PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT      = 3, //   3  =  0th row is at the bottom, and 0th column is on the right.    
        PHOTOS_EXIF_0ROW_BOTTOM_0COL_LEFT       = 4, //   4  =  0th row is at the bottom, and 0th column is on the left.    
        PHOTOS_EXIF_0ROW_LEFT_0COL_TOP          = 5, //   5  =  0th row is on the left, and 0th column is the top.    
        PHOTOS_EXIF_0ROW_RIGHT_0COL_TOP         = 6, //   6  =  0th row is on the right, and 0th column is the top.    
        PHOTOS_EXIF_0ROW_RIGHT_0COL_BOTTOM      = 7, //   7  =  0th row is on the right, and 0th column is the bottom.    
        PHOTOS_EXIF_0ROW_LEFT_0COL_BOTTOM       = 8  //   8  =  0th row is on the left, and 0th column is the bottom.    
    }; 
     
    switch (curDeviceOrientation) { 
        case UIDeviceOrientationPortraitUpsideDown:  // Device oriented vertically, home button on the top  
            exifOrientation = PHOTOS_EXIF_0ROW_LEFT_0COL_BOTTOM; 
            break; 
        case UIDeviceOrientationLandscapeLeft:       // Device oriented horizontally, home button on the right  
            if (isUsingFrontFacingCamera) 
                exifOrientation = PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT; 
            else 
                exifOrientation = PHOTOS_EXIF_0ROW_TOP_0COL_LEFT; 
            break; 
        case UIDeviceOrientationLandscapeRight:      // Device oriented horizontally, home button on the left  
            if (isUsingFrontFacingCamera) 
                exifOrientation = PHOTOS_EXIF_0ROW_TOP_0COL_LEFT; 
            else 
                exifOrientation = PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT; 
            break; 
        case UIDeviceOrientationPortrait:            // Device oriented vertically, home button on the bottom  
        default: 
            exifOrientation = PHOTOS_EXIF_0ROW_RIGHT_0COL_TOP; 
            break; 
    } 
 
    imageOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:exifOrientation] forKey:CIDetectorImageOrientation]; 
     
    //Detect faces  
    NSArray *features = [faceDetector featuresInImage:ciImage options:imageOptions]; 
    [ciImage release]; 
     
    // get the clean aperture  
    // the clean aperture is a rectangle that defines the portion of the encoded pixel dimensions  
    // that represents image data valid for display.  
    CMFormatDescriptionRef fdesc = CMSampleBufferGetFormatDescription(sampleBuffer); 
    CGRect clap = CMVideoFormatDescriptionGetCleanAperture(fdesc, false /*originIsTopLeft == false*/); 
     
    dispatch_async(dispatch_get_main_queue(), ^(void) { 
        [self drawFaceBoxesForFeatures:features forVideoBox:clap orientation:curDeviceOrientation]; 
    }); 
} 

 
- (void)dealloc 
{ 
    [self teardownAVCapture]; 
    [faceDetector release]; 
    [square release]; 
    [super dealloc]; 
} 

 
// Switch between the front and back cameras  
- (IBAction)switchCameras:(id)sender 
{ 
    AVCaptureDevicePosition desiredPosition; 
    if (isUsingFrontFacingCamera) 
        desiredPosition = AVCaptureDevicePositionBack; 
    else 
        desiredPosition = AVCaptureDevicePositionFront; 
     
    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { 
        if ([d position] == desiredPosition) { 
            [[previewLayer session] beginConfiguration]; 
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil]; 
            for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) { 
                [[previewLayer session] removeInput:oldInput]; 
            } 
            [[previewLayer session] addInput:input]; 
            [[previewLayer session] commitConfiguration]; 
            break; 
        } 
    } 
    isUsingFrontFacingCamera = !isUsingFrontFacingCamera; 
} 

 
- (void)didReceiveMemoryWarning 
{ 
    [super didReceiveMemoryWarning]; 
    // Release any cached data, images, etc that aren't in use.  
} 

 
#pragma mark - View lifecycle  
 
- (void)viewDidLoad 
{ 
    [super viewDidLoad]; 
    // Do any additional setup after loading the view, typically from a nib.  
    [self setupAVCapture]; 
    square = [[UIImage imageNamed:@"squarePNG"] retain]; 
    NSDictionary *detectorOptions = [[NSDictionary alloc] initWithObjectsAndKeys:CIDetectorAccuracyLow, CIDetectorAccuracy, nil]; 
    faceDetector = [[CIDetector detectorOfType:CIDetectorTypeFace context:nil options:detectorOptions] retain]; 
    [detectorOptions release]; 
} 
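
CIDetectorAccuracyLow is chosen because this detector runs on every video frame; as a hypothetical variation (not part of the sample), a detector used only on captured stills could favor accuracy over speed:

    NSDictionary *stillOptions = [NSDictionary dictionaryWithObjectsAndKeys:
                                  CIDetectorAccuracyHigh, CIDetectorAccuracy, nil]; 
    CIDetector *stillDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:stillOptions]; 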

 
- (void)viewDidUnload 
{ 
    [super viewDidUnload]; 
    // Release any retained subviews of the main view.  
    // e.g. self.myOutlet = nil;  
} 

 
- (void)viewWillAppear:(BOOL)animated 
{ 
    [super viewWillAppear:animated]; 
} 

 
- (void)viewDidAppear:(BOOL)animated 
{ 
    [super viewDidAppear:animated]; 
} 

 
- (void)viewWillDisappear:(BOOL)animated 
{ 
    [super viewWillDisappear:animated]; 
} 

 
- (void)viewDidDisappear:(BOOL)animated 
{ 
    [super viewDidDisappear:animated]; 
} 

 
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation 
{ 
    // Return YES for supported orientations  
    return (interfaceOrientation == UIInterfaceOrientationPortrait); 
} 

 
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer 
{ 
    if ( [gestureRecognizer isKindOfClass:[UIPinchGestureRecognizer class]] ) { 
        beginGestureScale = effectiveScale; 
    } 
    return YES; 
} 

 
// scale image depending on the user's pinch gesture  
- (IBAction)handlePinchGesture:(UIPinchGestureRecognizer *)recognizer 
{ 
    BOOL allTouchesAreOnThePreviewLayer = YES; 
    NSUInteger numTouches = [recognizer numberOfTouches], i; 
    for ( i = 0; i < numTouches; ++i ) { 
        CGPoint location = [recognizer locationOfTouch:i inView:previewView]; 
        CGPoint convertedLocation = [previewLayer convertPoint:location fromLayer:previewLayer.superlayer]; 
        if ( ! [previewLayer containsPoint:convertedLocation] ) { 
            allTouchesAreOnThePreviewLayer = NO; 
            break; 
        } 
    } 
     
    if ( allTouchesAreOnThePreviewLayer ) { 
        effectiveScale = beginGestureScale * recognizer.scale; 
        if (effectiveScale < 1.0) 
            effectiveScale = 1.0; 
        CGFloat maxScaleAndCropFactor = [[stillImageOutput connectionWithMediaType:AVMediaTypeVideo] videoMaxScaleAndCropFactor]; 
        if (effectiveScale > maxScaleAndCropFactor) 
            effectiveScale = maxScaleAndCropFactor; 
        [CATransaction begin]; 
        [CATransaction setAnimationDuration:.025]; 
        [previewLayer setAffineTransform:CGAffineTransformMakeScale(effectiveScale, effectiveScale)]; 
        [CATransaction commit]; 
    } 
} 

 
@end 
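
The methods newSquareOverlayedImageForFeatures:inCGImage:withOrientation:frontFacing: and writeCGImageToCameraRoll:withMetadata: are defined above but never called in this excerpt; in the full sample they are driven from a takePicture: action. A minimal sketch of how the pieces could fit together (illustrative only: it assumes stillImageOutput has been configured for BGRA output and that the latest detector results are kept in a hypothetical lastFeatures ivar; it is not the sample's actual method):

    - (IBAction)takePicture:(id)sender 
    { 
        AVCaptureConnection *stillConnection = [stillImageOutput connectionWithMediaType:AVMediaTypeVideo]; 
        [stillImageOutput captureStillImageAsynchronouslyFromConnection:stillConnection 
            completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) { 
                if (error) { 
                    [self displayErrorOnMainQueue:error withMessage:@"Take picture failed"]; 
                    return; 
                } 
                // BGRA output assumed, so the sample buffer carries an uncompressed pixel buffer 
                CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer); 
                CGImageRef srcImage = NULL; 
                CreateCGImageFromCVPixelBuffer(pixelBuffer, &srcImage); 
 
                // composite the red squares over the frame, then save it to the camera roll 
                CGImageRef overlaid = [self newSquareOverlayedImageForFeatures:lastFeatures   // hypothetical ivar 
                                                                     inCGImage:srcImage 
                                                               withOrientation:[[UIDevice currentDevice] orientation] 
                                                                   frontFacing:isUsingFrontFacingCamera]; 
                [self writeCGImageToCameraRoll:overlaid withMetadata:nil]; 
                CGImageRelease(srcImage); 
                CGImageRelease(overlaid); 
            }]; 
    } 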
