Recording videos with real-time filters in Swift

Problem description

I am new to Swift and am trying to build a camera app that can apply real-time filters and save the video with the filters applied.

So far I can preview in real time with the applied filters, but when I save the video it is all black.

import UIKit
import AVFoundation
import AssetsLibrary
import CoreMedia
import Photos

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var recordButtton: UIButton!
    @IBOutlet weak var imageView: UIImageView!

    var assetWriter: AVAssetWriter?
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor?
    var isWriting = false
    var currentSampleTime: CMTime?
    var currentVideoDimensions: CMVideoDimensions?

    override func viewDidLoad() {
        super.viewDidLoad()
        FilterVendor.register()
        setupCaptureSession()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func setupCaptureSession() {
        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else {
            print("Can't access the camera")
            return
        }

        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }

        let videoOutput = AVCaptureVideoDataOutput()

        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        if((previewLayer) != nil) {
            view.layer.addSublayer(previewLayer!)
        }

        captureSession.startRunning()
    }

    @IBAction func record(_ sender: Any) {
        if isWriting {
            print("stop record")
            self.isWriting = false
            assetWriterPixelBufferInput = nil
            assetWriter?.finishWriting(completionHandler: {[unowned self] () -> Void in
                self.saveMovieToCameraRoll()
            })
        } else {
            print("start record")
            createWriter()
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: currentSampleTime!)
            isWriting = true
        }
    }

    func saveMovieToCameraRoll() {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL)
        }) { saved, error in
            if saved {
                print("saved")
            }
        }
    }

    func movieURL() -> NSURL {
        let tempDir = NSTemporaryDirectory()
        let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov")
        return url! as NSURL
    }

    func checkForAndDeleteFile() {
        let fm = FileManager.default
        let url = movieURL()
        let exist = fm.fileExists(atPath: url.path!)

        if exist {
            do {
                try fm.removeItem(at: url as URL)
            } catch let error as NSError {
                print(error.localizedDescription)
            }
        }
    }

    func createWriter() {
        self.checkForAndDeleteFile()

        do {
            assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie)
        } catch let error as NSError {
            print(error.localizedDescription)
            return
        }

        let outputSettings = [
            AVVideoCodecKey : AVVideoCodecH264,
            AVVideoWidthKey : Int(currentVideoDimensions!.width),
            AVVideoHeightKey : Int(currentVideoDimensions!.height)
        ] as [String : Any]

        let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings as? [String : AnyObject])
        assetWriterVideoInput.expectsMediaDataInRealTime = true
        assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI / 2.0))

        let sourcePixelBufferAttributesDictionary = [
            String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
            String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
            String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
            String(kCVPixelFormatOpenGLESCompatibility) : kCFBooleanTrue
        ] as [String : Any]

        assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                           sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

        if assetWriter!.canAdd(assetWriterVideoInput) {
            assetWriter!.add(assetWriterVideoInput)
        } else {
            print("no way\(assetWriterVideoInput)")
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
        autoreleasepool {

            connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

            let filter = CIFilter(name: "Fİlter")!
            filter.setValue(cameraImage, forKey: kCIInputImageKey)


            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
            self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
            self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

            if self.isWriting {
                if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                    var newPixelBuffer: CVPixelBuffer? = nil

                    CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                    let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                    if success == false {
                        print("Pixel Buffer failed")
                    }
                }
            }

            DispatchQueue.main.async {

                if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                    let filteredImage = UIImage(ciImage: outputValue)
                    self.imageView.image = filteredImage
                }
            }
        }
    }
}

Solution

I've added some comments to the critical part below:

func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
    autoreleasepool {

        connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

        // COMMENT: This line makes sense - this is your pixelbuffer from the camera.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        // COMMENT: OK, so you turn pixelBuffer into a CIImage...
        let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

        // COMMENT: And now you've create a CIImage with a Filter instruction...
        let filter = CIFilter(name: "Fİlter")!
        filter.setValue(cameraImage, forKey: kCIInputImageKey)


        let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
        self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
        self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

        if self.isWriting {
            if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write...
                var newPixelBuffer: CVPixelBuffer? = nil

                // COMMENT: And you grabbed memory from the pool.
                CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                // COMMENT: And now you wrote an empty pixelBuffer back <-- this is what's causing the black frame.
                let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                if success == false {
                    print("Pixel Buffer failed")
                }
            }
        }

        // COMMENT: And now you're sending the filtered image back to the screen.
        DispatchQueue.main.async {

            if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                let filteredImage = UIImage(ciImage: outputValue)
                self.imageView.image = filteredImage
            }
        }
    }
}

It looks to me like you're basically taking the camera image, creating a filtered copy for the screen, then making a NEW pixel buffer which is empty and writing that out.

If you write the pixelBuffer you grabbed instead of the new one you're creating, you should successfully write the image.
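
That change is a single line inside the `isWriting` branch; note that it writes the raw camera frames, so the saved clip plays back, but without the filter:

    // Append the camera's own buffer instead of the freshly created, empty one.
    // The recording is no longer black, but no filter is applied either.
    let success = self.assetWriterPixelBufferInput?.append(pixelBuffer, withPresentationTime: self.currentSampleTime!)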

What you need to successfully write out the filtered video is to create a new CVPixelBuffer from a CIImage; that solution exists here on StackOverflow already. I know because I needed that step myself!
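
For reference, here is a minimal sketch of that final step, assuming a `ciContext` property created once alongside the capture session (e.g. `let ciContext = CIContext()`; creating a CIContext per frame is expensive). The idea is to render the filter's output image into the pooled buffer before appending it:

    if self.isWriting {
        if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true,
            let outputImage = filter.outputImage {
            var newPixelBuffer: CVPixelBuffer? = nil
            CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

            if let buffer = newPixelBuffer {
                // Render the filtered CIImage into the pooled buffer so the
                // frame is no longer empty when it is appended.
                self.ciContext.render(outputImage, to: buffer)

                let success = self.assetWriterPixelBufferInput?.append(buffer, withPresentationTime: self.currentSampleTime!)
                if success == false {
                    print("Pixel Buffer failed")
                }
            }
        }
    }

Keep the writer's AVVideoWidthKey/AVVideoHeightKey in sync with `currentVideoDimensions`, as the question's `createWriter()` already does; if the filter changes the image extent, the rendered frame may be cropped to the buffer's size.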
