
How to use AVCapturePhotoOutput


Updated to Swift 4. Hi, it's really easy to use AVCapturePhotoOutput.

You need the AVCapturePhotoCaptureDelegate, which returns the CMSampleBuffer.

You can also get a preview image if you set the previewPhotoFormat on the AVCapturePhotoSettings:

    import UIKit
    import AVFoundation

    class CameraCaptureOutput: NSObject, AVCapturePhotoCaptureDelegate {

        let cameraOutput = AVCapturePhotoOutput()

        func capturePhoto() {
            let settings = AVCapturePhotoSettings()
            let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
            let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                                 kCVPixelBufferWidthKey as String: 160,
                                 kCVPixelBufferHeightKey as String: 160]
            settings.previewPhotoFormat = previewFormat
            self.cameraOutput.capturePhoto(with: settings, delegate: self)
        }

        func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
            if let error = error {
                print(error.localizedDescription)
            }

            if let sampleBuffer = photoSampleBuffer,
               let previewBuffer = previewPhotoSampleBuffer,
               let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
                print("image: \(String(describing: UIImage(data: dataImage)?.size))") // Your Image
            }
        }
    }

For more information visit https://developer.apple.com/reference/AVFoundation/AVCapturePhotoOutput

Note: You have to add the AVCapturePhotoOutput to the AVCaptureSession before taking the picture. So something like: session.addOutput(output), and then: output.capturePhoto(with: settings, delegate: self). Thanks @BigHeadCreations.
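In outline, that ordering looks something like this (a minimal sketch; the session/output/device/input names are placeholders, and it assumes camera permission is already granted and that self conforms to AVCapturePhotoCaptureDelegate):

    let session = AVCaptureSession()
    let output = AVCapturePhotoOutput()

    if let device = AVCaptureDevice.default(for: .video),
       let input = try? AVCaptureDeviceInput(device: device),
       session.canAddInput(input), session.canAddOutput(output) {
        session.addInput(input)
        session.addOutput(output) // must happen before capturePhoto(with:delegate:)
        session.startRunning()
        output.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
    }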


Here is my full implementation:

    import UIKit
    import AVFoundation

    class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {

        var captureSession: AVCaptureSession!
        var cameraOutput: AVCapturePhotoOutput!
        var previewLayer: AVCaptureVideoPreviewLayer!

        @IBOutlet weak var capturedImage: UIImageView!
        @IBOutlet weak var previewView: UIView!

        override func viewDidLoad() {
            super.viewDidLoad()
            captureSession = AVCaptureSession()
            captureSession.sessionPreset = AVCaptureSessionPresetPhoto
            cameraOutput = AVCapturePhotoOutput()

            let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            if let input = try? AVCaptureDeviceInput(device: device) {
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                    if captureSession.canAddOutput(cameraOutput) {
                        captureSession.addOutput(cameraOutput)
                        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                        previewLayer.frame = previewView.bounds
                        previewView.layer.addSublayer(previewLayer)
                        captureSession.startRunning()
                    }
                } else {
                    print("issue here: captureSession.canAddInput")
                }
            } else {
                print("some problem here")
            }
        }

        // Take picture button
        @IBAction func didPressTakePhoto(_ sender: UIButton) {
            let settings = AVCapturePhotoSettings()
            let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
            let previewFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                kCVPixelBufferWidthKey as String: 160,
                kCVPixelBufferHeightKey as String: 160
            ]
            settings.previewPhotoFormat = previewFormat
            cameraOutput.capturePhoto(with: settings, delegate: self)
        }

        // Callback from take picture
        func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
            if let error = error {
                print("error occurred: \(error.localizedDescription)")
            }
            if let sampleBuffer = photoSampleBuffer,
               let previewBuffer = previewPhotoSampleBuffer,
               let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
                print(UIImage(data: dataImage)?.size as Any)

                let dataProvider = CGDataProvider(data: dataImage as CFData)
                let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
                let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
                self.capturedImage.image = image
            } else {
                print("some error here")
            }
        }

        // Use this method wherever you need to know the camera permission state
        func askPermission() {
            print("here")
            let cameraPermissionStatus = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)
            switch cameraPermissionStatus {
            case .authorized:
                print("Already Authorized")
            case .denied:
                print("denied")
                let alert = UIAlertController(title: "Sorry :(", message: "But could you please grant permission for camera within device settings", preferredStyle: .alert)
                let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                alert.addAction(action)
                present(alert, animated: true, completion: nil)
            case .restricted:
                print("restricted")
            default:
                AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [weak self] (granted: Bool) -> Void in
                    if granted {
                        // User granted
                        print("User granted")
                        DispatchQueue.main.async {
                            // Do whatever you need on the main thread
                        }
                    } else {
                        // User rejected
                        print("User Rejected")
                        DispatchQueue.main.async {
                            let alert = UIAlertController(title: "WHY?", message: "Camera is the main feature of our application", preferredStyle: .alert)
                            let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                            alert.addAction(action)
                            self?.present(alert, animated: true, completion: nil)
                        }
                    }
                })
            }
        }
    }
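One thing worth flagging: since iOS 10 the app crashes as soon as it touches the camera unless Info.plist contains an NSCameraUsageDescription entry, so add that before testing the code above. Also, if you build against Swift 4, the forMediaType: spellings used above are renamed; here is a minimal sketch of the same permission check with the newer spellings (not a drop-in replacement for askPermission()):

    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        break // ready to capture
    case .notDetermined:
        AVCaptureDevice.requestAccess(for: .video) { granted in
            DispatchQueue.main.async {
                // update the UI based on `granted`
            }
        }
    default:
        break // .denied / .restricted: direct the user to Settings
    }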


In iOS 11, the photoOutput(_:didFinishProcessingPhoto:previewPhoto:resolvedSettings:bracketSettings:error:) delegate method shown above is deprecated.

Use the following method instead:

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        let imageData = photo.fileDataRepresentation()
        if let data = imageData, let img = UIImage(data: data) {
            print(img)
        }
    }
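The capture call that triggers this delegate path is unchanged; for example (a sketch, assuming cameraOutput is an AVCapturePhotoOutput already attached to a running session, as in the full implementation above):

    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    cameraOutput.capturePhoto(with: settings, delegate: self)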