Making a UIImage from a CMSampleBuffer

This is not the same as the countless questions about converting a CMSampleBuffer to a UIImage. I'm simply wondering why I can't convert it like this:

CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
CIImage * imageFromCoreImageLibrary = [CIImage imageWithCVPixelBuffer: pixelBuffer];
UIImage * imageForUI = [UIImage imageWithCIImage: imageFromCoreImageLibrary];

It seems much simpler, because it works for YCbCr color spaces as well as RGBA and others. Is there a problem with this code?

score 13 · asked by JasonMArcher, 2013-03-31 09:49:13

7 answers

With Swift 3 and iOS 10 (AVCapturePhotoOutput). Includes:

import UIKit
import CoreData
import CoreMotion
import AVFoundation

Create a UIView for the preview and bind it to the main class:

  @IBOutlet var preview: UIView!

Create this camera session configuration (kCVPixelFormatType_32BGRA is important!):

  lazy var cameraSession: AVCaptureSession = {
    let s = AVCaptureSession()
    s.sessionPreset = AVCaptureSessionPresetHigh
    return s
  }()

  lazy var previewLayer: AVCaptureVideoPreviewLayer = {
    let previewl:AVCaptureVideoPreviewLayer =  AVCaptureVideoPreviewLayer(session: self.cameraSession)
    previewl.frame = self.preview.bounds
    return previewl
  }()

  func setupCameraSession() {
    let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) as AVCaptureDevice

    do {
      let deviceInput = try AVCaptureDeviceInput(device: captureDevice)

      cameraSession.beginConfiguration()

      if (cameraSession.canAddInput(deviceInput) == true) {
        cameraSession.addInput(deviceInput)
      }

      let dataOutput = AVCaptureVideoDataOutput()
      dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
      dataOutput.alwaysDiscardsLateVideoFrames = true

      if (cameraSession.canAddOutput(dataOutput) == true) {
        cameraSession.addOutput(dataOutput)
      }

      cameraSession.commitConfiguration()

      let queue = DispatchQueue(label: "fr.popigny.videoQueue", attributes: [])
      dataOutput.setSampleBufferDelegate(self, queue: queue)

    }
    catch let error as NSError {
      NSLog("\(error), \(error.localizedDescription)")
    }
  }
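Note: on iOS 10 and later you must also add an NSCameraUsageDescription entry (with a short explanation string) to the app's Info.plist, or the app will crash as soon as the capture session requests camera access.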

In viewWillAppear:

  override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    setupCameraSession()
  }

In viewDidAppear:

  override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    preview.layer.addSublayer(previewLayer)
    cameraSession.startRunning()
  }

Create the capture output function:

  func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

    // Here you collect each frame and process it
    let ts:CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    self.mycapturedimage = imageFromSampleBuffer(sampleBuffer: sampleBuffer)
  }
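The delegate stores each converted frame in mycapturedimage, which the snippet doesn't declare; presumably it is just a property on the view controller, e.g.:

  var mycapturedimage: UIImage?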

Here is the code that converts a kCVPixelFormatType_32BGRA CMSampleBuffer into a UIImage. The key element is the bitmapInfo: it must match 32BGRA, i.e. 32-bit little-endian with premultiplied-first alpha info:

  func imageFromSampleBuffer(sampleBuffer : CMSampleBuffer) -> UIImage
  {
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    let  imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer!, CVPixelBufferLockFlags.readOnly);


    // Get the base address of the pixel buffer
    let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer!);

    // Get the number of bytes per row for the pixel buffer
    let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer!);
    // Get the pixel buffer width and height
    let width = CVPixelBufferGetWidth(imageBuffer!);
    let height = CVPixelBufferGetHeight(imageBuffer!);

    // Create a device-dependent RGB color space
    let colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    var bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue
    bitmapInfo |= CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue
    let context = CGContext.init(data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo)
    // Create a Quartz image from the pixel data in the bitmap graphics context
    let quartzImage = context?.makeImage();
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer!, CVPixelBufferLockFlags.readOnly);

    // Create an image object from the Quartz image
    let image = UIImage.init(cgImage: quartzImage!);

    return (image);
  }
score 14 · answered by Popigny, 2016-10-22 14:35:15

Swift 4:

let buff: CMSampleBuffer = ...          // here you have your CMSampleBuffer
if let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: buff, previewPhotoSampleBuffer: nil) {
    let image = UIImage(data: imageData) //  Here you have UIImage
}
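Note that jpegPhotoDataRepresentation(forJPEGSampleBuffer:previewPhotoSampleBuffer:) is deprecated as of iOS 11. If you can target iOS 11+, the newer AVCapturePhotoCaptureDelegate callback hands you an AVCapturePhoto instead (the last answer below uses this), and the conversion gets shorter; a minimal sketch:

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    guard let data = photo.fileDataRepresentation(),
          let image = UIImage(data: data) else { return }
    // image is your UIImage
}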
score 20 · answered by Alexander Volkov, 2018-03-12 03:05:53

Use the following code to convert the image from the pixel buffer. Option 1:

CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];

CIContext *context = [CIContext contextWithOptions:nil];
CGImageRef myImage = [context
                         createCGImage:ciImage
                         fromRect:CGRectMake(0, 0,
                                             CVPixelBufferGetWidth(pixelBuffer),
                                             CVPixelBufferGetHeight(pixelBuffer))];

UIImage *uiImage = [UIImage imageWithCGImage:myImage];
CGImageRelease(myImage); // createCGImage returns a +1 reference; release it to avoid a leak

Option 2:

int w = CVPixelBufferGetWidth(pixelBuffer);
int h = CVPixelBufferGetHeight(pixelBuffer);
int r = CVPixelBufferGetBytesPerRow(pixelBuffer);
int bytesPerPixel = r/w;

CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); // lock before reading the base address
unsigned char *buffer = CVPixelBufferGetBaseAddress(pixelBuffer);

UIGraphicsBeginImageContext(CGSizeMake(w, h));

CGContextRef c = UIGraphicsGetCurrentContext();

unsigned char* data = CGBitmapContextGetData(c);
if (data != NULL) {
    int maxY = h;
    for(int y = 0; y<maxY; y++) {
        for(int x = 0; x<w; x++) {
            int offset = bytesPerPixel*((w*y)+x); // NOTE: assumes no row padding (bytesPerRow == w * bytesPerPixel)
            data[offset] = buffer[offset];     // R
            data[offset+1] = buffer[offset+1]; // G
            data[offset+2] = buffer[offset+2]; // B
            data[offset+3] = buffer[offset+3]; // A
        }
    }
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); // balance the lock taken above
UIImage *img = UIGraphicsGetImageFromCurrentImageContext();

UIGraphicsEndImageContext();
score 12 · answered by Dipen Panchasara, 2013-03-31 05:56:48

I wrote a simple extension for use with Swift 4.x/3.x to produce a UIImage from a CMSampleBuffer.

It also handles scale and orientation, though you can accept the default values if they work for you.

import UIKit
import AVFoundation

extension CMSampleBuffer {
    func image(orientation: UIImageOrientation = .up, 
               scale: CGFloat = 1.0) -> UIImage? {
        if let buffer = CMSampleBufferGetImageBuffer(self) {
            let ciImage = CIImage(cvPixelBuffer: buffer)

            return UIImage(ciImage: ciImage, 
                           scale: scale,
                           orientation: orientation)
        }

        return nil
    }
}
  1. If it can get buffer data from the image, it proceeds; otherwise nil is returned.
  2. Using the buffer, it initializes a CIImage.
  3. It returns a UIImage initialized with the ciImage plus the scale & orientation values. If none are provided, the defaults of up and 1.0 are used, respectively.
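For example, from a captureOutput delegate callback (Swift 4 signature) you might use it like this; .right is typically correct for portrait video from the back camera, but adjust for your setup:

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // Convert the frame; returns nil if no image buffer could be extracted
    if let image = sampleBuffer.image(orientation: .right) {
        // use the UIImage (e.g. dispatch to the main queue to display it)
    }
}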
score 5 · answered by CodeBender, 2018-08-03 04:03:44

This is going to come up a lot in connection with the iOS 10 AVCapturePhotoOutput class. Suppose the user wants to snap a photo, you call capturePhoto(with:delegate:), and your settings include a request for a preview image. That's a very efficient way to get a preview image, but how are you going to display it in your interface? The preview image arrives as a CMSampleBuffer in your implementation of the delegate method:

func capture(_ output: AVCapturePhotoOutput, 
    didFinishProcessingPhotoSampleBuffer buff: CMSampleBuffer?, 
    previewPhotoSampleBuffer: CMSampleBuffer?, 
    resolvedSettings: AVCaptureResolvedPhotoSettings, 
    bracketSettings: AVCaptureBracketedStillImageSettings?, 
    error: Error?) {

You need to transform that CMSampleBuffer, previewPhotoSampleBuffer, into a UIImage. How are you going to do that? Like this:

if let prev = previewPhotoSampleBuffer {
    if let buff = CMSampleBufferGetImageBuffer(prev) {
        let cim = CIImage(cvPixelBuffer: buff)
        let im = UIImage(ciImage: cim)
        // and now you have a UIImage! do something with it ...
    }
}
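One caveat: a UIImage built this way wraps an unrendered CIImage (its cgImage property is nil), which is fine for display in a UIImageView but can trip up APIs that expect bitmap data. If you need real pixels, render through a CIContext first; a minimal sketch building on the snippet above:

if let prev = previewPhotoSampleBuffer,
   let buff = CMSampleBufferGetImageBuffer(prev) {
    let cim = CIImage(cvPixelBuffer: buff)
    let context = CIContext() // expensive to create: make one and reuse it, not one per capture
    if let cgim = context.createCGImage(cim, from: cim.extent) {
        let im = UIImage(cgImage: cgim) // bitmap-backed UIImage
        // ... use im
    }
}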
score 2 · answered by matt, 2016-10-02 14:45:29

TO ALL: don't use methods of the form:

    private let context = CIContext()

    private func imageFromSampleBuffer2(_ sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

They eat far more CPU and take longer to convert each frame.

Use the solution from https://stackoverflow.com/a/40193359/7767664 instead.
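If you want to verify the difference yourself, a rough timing sketch comparing the two paths on the same frame (using the two method names from this answer, and assuming a sampleBuffer in scope from the delegate callback):

let t0 = CFAbsoluteTimeGetCurrent()
_ = imageFromSampleBuffer(sampleBuffer)    // CGContext-based path (below)
let t1 = CFAbsoluteTimeGetCurrent()
_ = imageFromSampleBuffer2(sampleBuffer)   // CIContext-based path (above)
let t2 = CFAbsoluteTimeGetCurrent()
print("CGContext: \(t1 - t0)s, CIContext: \(t2 - t1)s")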

Don't forget to set the following for your AVCaptureVideoDataOutput:

    videoOutput = AVCaptureVideoDataOutput()

    videoOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
    //videoOutput.alwaysDiscardsLateVideoFrames = true

    videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "MyQueue"))

The conversion method:

    func imageFromSampleBuffer(_ sampleBuffer : CMSampleBuffer) -> UIImage {
        // Get a CMSampleBuffer's Core Video image buffer for the media data
        let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
        // Lock the base address of the pixel buffer
        CVPixelBufferLockBaseAddress(imageBuffer!, CVPixelBufferLockFlags.readOnly)

        // Get the base address of the pixel buffer
        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer!)

        // Get the number of bytes per row for the pixel buffer
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer!)
        // Get the pixel buffer width and height
        let width = CVPixelBufferGetWidth(imageBuffer!)
        let height = CVPixelBufferGetHeight(imageBuffer!)

        // Create a device-dependent RGB color space
        let colorSpace = CGColorSpaceCreateDeviceRGB()

        // Create a bitmap graphics context with the sample buffer data:
        // 32BGRA means little-endian byte order with premultiplied-first alpha
        var bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue
        bitmapInfo |= CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue
        let context = CGContext(data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo)
        // Create a Quartz image from the pixel data in the bitmap graphics context
        let quartzImage = context?.makeImage()
        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(imageBuffer!, CVPixelBufferLockFlags.readOnly)

        // Create an image object from the Quartz image
        return UIImage(cgImage: quartzImage!)
    }
score 1 · answered by user924, 2018-03-03 12:50:41

A Swift 4 / iOS 11 version of Popigny's answer:

import Foundation
import AVFoundation
import UIKit

class ViewController : UIViewController {
    let captureSession = AVCaptureSession()
    let photoOutput = AVCapturePhotoOutput()
    let cameraPreview = UIView(frame: .zero)
    let progressIndicator = ProgressIndicator() // custom progress view from the linked example project, not a UIKit class

    override func viewDidLoad() {
        super.viewDidLoad()

        setupVideoPreview()

        do {
            try setupCaptureSession()
        } catch {
            let errorMessage = String(describing:error)
            print("[--ERROR--]: \(#file):\(#function):\(#line): " + errorMessage)
            alert(title: "Error", message: errorMessage)
        }
    }

    private func setupCaptureSession() throws {
        let deviceDiscovery = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.back)
        let devices = deviceDiscovery.devices

        guard let captureDevice = devices.first else {
            let errorMessage = "No camera available"
            print("[--ERROR--]: \(#file):\(#function):\(#line): " + errorMessage)
            alert(title: "Error", message: errorMessage)
            return
        }

        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
        captureSession.addInput(captureDeviceInput)
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        captureSession.startRunning()

        if captureSession.canAddOutput(photoOutput) {
            captureSession.addOutput(photoOutput)
        }
    }

    private func setupVideoPreview() {

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.bounds = view.bounds
        previewLayer.position = CGPoint(x:view.bounds.midX, y:view.bounds.midY)
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill

        cameraPreview.layer.addSublayer(previewLayer)
        cameraPreview.addGestureRecognizer(UITapGestureRecognizer(target: self, action:#selector(capturePhoto)))

        cameraPreview.translatesAutoresizingMaskIntoConstraints = false

        view.addSubview(cameraPreview)

        let viewsDict = ["cameraPreview":cameraPreview]
        view.addConstraints(NSLayoutConstraint.constraints(withVisualFormat: "V:|-0-[cameraPreview]-0-|", options: [], metrics: nil, views: viewsDict))
        view.addConstraints(NSLayoutConstraint.constraints(withVisualFormat: "H:|-0-[cameraPreview]-0-|", options: [], metrics: nil, views: viewsDict))

    }

    @objc func capturePhoto(_ sender: UITapGestureRecognizer) {
        progressIndicator.add(toView: view)
        let photoOutputSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey:AVVideoCodecType.jpeg])
        photoOutput.capturePhoto(with: photoOutputSettings, delegate: self)
    }

    func saveToPhotosAlbum(_ image: UIImage) {
        UIImageWriteToSavedPhotosAlbum(image, self, #selector(photoWasSavedToAlbum), nil)
    }

    @objc func photoWasSavedToAlbum(_ image: UIImage, _ error: Error?, _ context: Any?) {
        alert(message: "Photo saved to device photo album")
    }

    func alert(title: String?=nil, message:String?=nil) {
        let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
        present(alert, animated:true)
    }

}

extension ViewController : AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {

        guard  let photoData = photo.fileDataRepresentation() else {
            let errorMessage = "Photo capture did not provide output data"
            print("[--ERROR--]: \(#file):\(#function):\(#line): " + errorMessage)
            alert(title: "Error", message: errorMessage)
            return
        }

        guard let image = UIImage(data: photoData) else {
            let errorMessage = "could not create image to save"
            print("[--ERROR--]: \(#file):\(#function):\(#line): " + errorMessage)
            alert(title: "Error", message: errorMessage)
            return
        }

        saveToPhotosAlbum(image)

        progressIndicator.hide()
    }
}

A complete example project showing this in context: https://github.com/cruinh/CameraCapture

score -1 · answered by Cruinh, 2018-02-16 19:59:46