Capture current camera image using AVFoundation

Solution 1

This is what I am using in one of the apps I am working on; it should be helpful for your problem as well.

func capturePicture() {

    println("Capturing image")
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    // Add the output only if the session does not already have it,
    // otherwise a second call would raise an exception
    if captureSession.canAddOutput(stillImageOutput) {
        captureSession.addOutput(stillImageOutput)
    }

    if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
        stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
            (sampleBuffer, error) in
            // Turn the JPEG sample buffer into a UIImage
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
            let dataProvider = CGDataProviderCreateWithCFData(imageData)
            let cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault)
            let image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right)

            let imageView = UIImageView(image: image)
            imageView.frame = CGRect(x: 0, y: 0, width: self.screenSize.width, height: self.screenSize.height)

            // Show the captured image to the user
            self.view.addSubview(imageView)

            // Save the captured image to the photo album
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        })
    }
}
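
To capture on a button press, as the question asks, one option is to keep the captured image in a property instead of only pushing it into an image view. A minimal sketch, assuming the view controller already owns captureSession and a configured stillImageOutput; the capturedImage property is a hypothetical name, not part of the original answer:

    var capturedImage: UIImage?

    @IBAction func shotPress(sender: UIButton) {
        // Assumes stillImageOutput was configured and added to the session
        // once (e.g. in viewDidLoad), so it is not re-added on every tap
        if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
            stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
                (sampleBuffer, error) in
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                // Build a UIImage straight from the JPEG data and keep it
                self.capturedImage = UIImage(data: imageData)
            })
        }
    }

Storing the result in a property is what makes it available to the rest of the view controller after the asynchronous completion handler has run.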

Solution 2

Updated for Swift 3:

let stillImageOutput = AVCaptureStillImageOutput()
stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if self.cameraSession.canAddOutput(stillImageOutput) {
    self.cameraSession.addOutput(stillImageOutput)
}

if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
    stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: {
        (sampleBuffer, error) in
        // Unwrap everything safely instead of force-casting
        guard let sampleBuffer = sampleBuffer,
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
            let dataProvider = CGDataProvider(data: imageData as CFData),
            let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: .defaultIntent) else {
                return
        }
        let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: .right)
        // do something with image
    })
}
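
Note that AVCaptureStillImageOutput was deprecated in iOS 10 in favor of AVCapturePhotoOutput. If you can target iOS 11 or later, a hedged sketch of that route looks roughly like the following; PhotoCaptor, attach(to:) and onImage are made-up names used only for illustration, not part of the original answer:

import AVFoundation
import UIKit

// Rough sketch only: a small helper object that owns the photo output,
// triggers a capture, and hands the decoded UIImage to a callback.
class PhotoCaptor: NSObject, AVCapturePhotoCaptureDelegate {

    let photoOutput = AVCapturePhotoOutput()
    var onImage: ((UIImage) -> Void)?

    func attach(to session: AVCaptureSession) {
        if session.canAddOutput(photoOutput) {
            session.addOutput(photoOutput)
        }
    }

    func capture() {
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        photoOutput.capturePhoto(with: settings, delegate: self)
    }

    // iOS 11+ delegate callback with the finished photo
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        guard error == nil,
            let data = photo.fileDataRepresentation(),
            let image = UIImage(data: data) else { return }
        onImage?(image)
    }
}

The question's shotPress action could then simply call capture() on an instance that was attached to captureSession during setup, and keep whatever image it receives through onImage.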

Comments

  • 9kv almost 2 years

    I'm trying to capture the image and save it to a variable when I press "myButton". What should I do?

    My code is as follows:

    import UIKit
    import AVFoundation
    import MobileCoreServices
    
    class ViewController: UIViewController {
    
        let captureSession = AVCaptureSession()
        var previewLayer : AVCaptureVideoPreviewLayer?
        var captureDevice : AVCaptureDevice?
        @IBOutlet var myTap: UITapGestureRecognizer!
        @IBOutlet weak var myButton: UIButton!
    
        @IBAction func shotPress(sender: UIButton) {
            //Save image to variable somehow
            var stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    
            if captureSession.canAddOutput(stillImageOutput) {
                captureSession.addOutput(stillImageOutput)
            }
        }
    
        override func viewDidLoad() {
            super.viewDidLoad()
            captureSession.sessionPreset = AVCaptureSessionPresetHigh
            let devices = AVCaptureDevice.devices()
            for device in devices {
                if (device.hasMediaType(AVMediaTypeVideo)) {
                    if(device.position == AVCaptureDevicePosition.Back) {
                        captureDevice = device as? AVCaptureDevice
                        if captureDevice != nil {
                            beginSession()
                        }
                    }
                }
            }
        }
    
        func updateDeviceSettings(focusValue : Float, isoValue : Float) {
            if let device = captureDevice {
                if(device.lockForConfiguration(nil)) {
                    device.focusMode = AVCaptureFocusMode.ContinuousAutoFocus
                    device.unlockForConfiguration()
                }
            }
        }
    
        func beginSession() {
            var err : NSError? = nil
            captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
            if err != nil {
                println("error: \(err?.localizedDescription)")
            }
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            self.view.layer.addSublayer(previewLayer)
            self.view.bringSubviewToFront(myButton)
            previewLayer?.frame = self.view.layer.frame
            captureSession.startRunning()
        }
    
    }
    
    • Jan over 9 years
      What you basically have to do is try to grab the sample buffer for this image. You can do this by calling func captureStillImageAsynchronouslyFromConnection(_ connection: AVCaptureConnection!, completionHandler handler: ((CMSampleBuffer!, NSError!) -> Void)!) on your AVCaptureStillImageOutput. Pass a completion handler, in which you'll do what you want with your image.
  • Marcus Gabilheri over 8 years
    Nice answer. May I ask a question? What is kCGRenderingDefault?
  • Morganster over 8 years
    Hello, you can replace the lines below imageData with var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) and var image = UIImage(data: imageData, scale: 1.0), then save the captured preview to the photo album with UIImageWriteToSavedPhotosAlbum(image!, nil, nil, nil); see the sketch after these comments.
  • iksnae over 8 years
    @MarcusGabilheri I think you want: CGColorRenderingIntent.RenderingIntentDefault
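
Morganster's simplification above would make Solution 1's completion handler look roughly like this; it is only a sketch, not part of the original answer, and it drops the Core Graphics round-trip by building the UIImage straight from the JPEG data:

    stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
        (sampleBuffer, error) in
        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
        if let image = UIImage(data: imageData, scale: 1.0) {
            // Save the captured preview straight to the photo album
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        }
    })

Note that this relies on whatever orientation information is embedded in the JPEG data rather than forcing UIImageOrientation.Right as Solution 1 does.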