AVCaptureStillImageOutput vs AVCapturePhotoOutput in Swift 3


Solution 1

AVCaptureStillImageOutput being deprecated means you can keep using it in iOS 10, but:

  • Apple makes no promises as to how long past iOS 10 it'll stay available.
  • As new hardware and software features get added in iOS 10 and beyond, you won't get access to all of them through the deprecated class. For example, you can set up AVCaptureStillImageOutput for wide color, but it's a lot easier to do wide color with AVCapturePhotoOutput. And for RAW capture or Live Photos, AVCapturePhotoOutput is the only game in town (see the sketch after this list).
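
If you do move to AVCapturePhotoOutput, here's a minimal sketch (my own illustration, not from the original answer) of how you might probe for those iOS 10-only features. The availability properties only report meaningful values once the output has been added to a configured session:

// `photoOutput` stands in for an AVCapturePhotoOutput that has already been
// added to a running AVCaptureSession; until then the arrays below are empty.
let photoOutput = AVCapturePhotoOutput()

if photoOutput.isLivePhotoCaptureSupported {
    print("Live Photo capture is available with this configuration")
}
if let rawFormat = photoOutput.availableRawPhotoPixelFormatTypes.first {
    // RAW capture is requested through AVCapturePhotoSettings
    let rawSettings = AVCapturePhotoSettings(rawPixelFormatType: rawFormat.uint32Value)
    print("RAW capture available, pixel format: \(rawSettings.rawPhotoPixelFormatType)")
}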

If you're happy proceeding despite the deprecation, your issue isn't that outputSettings is removed — it's still there.

Something to be aware of for beta 6 and beyond (though it turns out not to be an issue here): APIs that use NSDictionary without explicit key and value types come into Swift 3 as [AnyHashable: Any], and the Foundation or CoreFoundation types you might put in such a dictionary are no longer implicitly bridged to Swift types. (Some of the other questions about beta 6 dictionary conversions might point you in the right direction there.)
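
For instance (a small illustration of my own, not from the answer), AVVideoCodecKey is already a Swift String, which is why the outputSettings assignment needs no cast, while CoreFoundation-typed keys are typically bridged explicitly:

import AVFoundation

// String-typed AVFoundation key: no cast required
let jpegSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecJPEG]

// CFString-typed CoreVideo key: bridge it explicitly with `as String`
let pixelSettings: [String: Any] = [
    kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)
]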

However, I'm not getting any compilation errors for setting outputSettings, whether in your full code or reduced to the essential parts for that line:

var stillImageOutput : AVCaptureStillImageOutput?
stillImageOutput = AVCaptureStillImageOutput()
stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

...the only warnings I see are about the deprecation.

Solution 2

Here is my full implementation:

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {

    var captureSesssion : AVCaptureSession!
    var cameraOutput : AVCapturePhotoOutput!
    var previewLayer : AVCaptureVideoPreviewLayer!

    @IBOutlet weak var capturedImage: UIImageView!
    @IBOutlet weak var previewView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSesssion = AVCaptureSession()
        captureSesssion.sessionPreset = AVCaptureSessionPresetPhoto
        cameraOutput = AVCapturePhotoOutput()

        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        if let input = try? AVCaptureDeviceInput(device: device) {
            if captureSesssion.canAddInput(input) {
                captureSesssion.addInput(input)
                if captureSesssion.canAddOutput(cameraOutput) {
                    captureSesssion.addOutput(cameraOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSesssion)
                    previewLayer.frame = previewView.bounds
                    previewView.layer.addSublayer(previewLayer)
                    captureSesssion.startRunning()
                }
            } else {
                print("issue here : captureSesssion.canAddInput")
            }
        } else {
            print("some problem here")
        }
    }

    // Take picture button
    @IBAction func didPressTakePhoto(_ sender: UIButton) {
        let settings = AVCapturePhotoSettings()
        // Request a small embedded preview image alongside the full-size capture
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: 160,
            kCVPixelBufferHeightKey as String: 160
        ]
        settings.previewPhotoFormat = previewFormat
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    // Callback from capturePhoto(with:delegate:): delivers the processed photo sample buffer
    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {

        if let error = error {
            print("error occure : \(error.localizedDescription)")
        }

        if let sampleBuffer = photoSampleBuffer,
            let previewBuffer = previewPhotoSampleBuffer,
            let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print(UIImage(data: dataImage)?.size as Any)

            let dataProvider = CGDataProvider(data: dataImage as CFData)
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)

            self.capturedImage.image = image
        } else {
            print("some error here")
        }
    }

    // Use this method wherever you need to check the camera permission state
    func askPermission() {
        print("here")

        let cameraPermissionStatus = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)

        switch cameraPermissionStatus {
        case .authorized:
            print("Already Authorized")

        case .denied:
            print("denied")

            let alert = UIAlertController(title: "Sorry :(" , message: "But could you please grant permission for camera within device settings",  preferredStyle: .alert)
            let action = UIAlertAction(title: "Ok", style: .cancel,  handler: nil)
            alert.addAction(action)
            present(alert, animated: true, completion: nil)

        case .restricted:
            print("restricted")

        default:
            // .notDetermined: ask the user for camera access
            AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) {
                [weak self] (granted: Bool) -> Void in

                if granted == true {
                    // User granted
                    print("User granted")
                    DispatchQueue.main.async {
                        // Do any UI work you need on the main thread
                    }
                } else {
                    // User Rejected
                    print("User Rejected")

                    DispatchQueue.main.async {
                        let alert = UIAlertController(title: "WHY?", message: "The camera is the main feature of our application", preferredStyle: .alert)
                        let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                        alert.addAction(action)
                        self?.present(alert, animated: true, completion: nil)  
                    }
                }
            }
        }
    }
}
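
One practical note (my addition, not part of the original answer): on iOS 10 the app must also declare an NSCameraUsageDescription entry in its Info.plist before it touches the camera, and it makes sense to run the permission check before the session is built, for example by adding something like this to the view controller above:

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    askPermission() // prompt (or re-check) before the camera preview appears
}
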
Author: Andriyas Redel (updated on June 04, 2022)

Comments

  • Andriyas Redel, almost 2 years ago

    I am trying to simply put a Camera View in my View Controller.

    I imported AVFoundation at the top and adopted the UIImagePickerControllerDelegate and UINavigationControllerDelegate protocols.

    However, whenever I try to use AVCaptureStillImageOutput, Xcode tells me that it was deprecated in iOS 10 and that I should use AVCapturePhotoOutput. That is completely fine; however, as soon as I want to set stillImageOutput.outputSettings, .outputSettings itself is not available. Thus, I have to use AVCaptureStillImageOutput for it to work, but I get multiple warnings because this class was deprecated in iOS 10.

    I searched and searched but could not really find the solution around it. I would really appreciate your help. I am learning so any explanation would be great! Code is below.

    import UIKit
    import AVFoundation
    
    class CameraView: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    
        var captureSession : AVCaptureSession?
        var stillImageOutput : AVCaptureStillImageOutput?
        var previewLayer : AVCaptureVideoPreviewLayer?
    
        @IBOutlet var cameraView: UIView!
    
        override func viewWillAppear(_ animated: Bool) {
            super.viewWillAppear(animated)
            captureSession = AVCaptureSession()
            captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080
    
            var backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            var error : NSError?
    
            do {
                var input = try! AVCaptureDeviceInput (device: backCamera)
                if (error == nil && captureSession?.canAddInput(input) != nil) {
    
                    captureSession?.addInput(input)
    
                    stillImageOutput = AVCaptureStillImageOutput()
                    stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    
                    if (captureSession?.canAddOutput(stillImageOutput) != nil) {
                        captureSession?.addOutput(stillImageOutput)
    
                        previewLayer = AVCaptureVideoPreviewLayer (session: captureSession)
                        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
                        previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                        cameraView.layer.addSublayer(previewLayer!)
                        captureSession?.startRunning()
                    }
                }
            } catch {
    
            }
        }
    }
    
  • Andriyas Redel, over 7 years ago: Thank you so much!
  • Nikhil Manapure, about 7 years ago: Thanks a lot for your answer. :)