How to fix the front-facing camera mirroring captured images (AVFoundation, Swift)


Problem description

How can I fix the image being mirrored when it is captured with the front-facing camera? Snapchat, WhatsApp and Instagram all seem to correct this, so how can I do the same? I would really love to find a solution for this; it is quite annoying. Thanks in advance.

I have seen Always seeing Mirror image while capturing from Front Camera iOS 5.0, but that approach flips the images from both the rear and the front camera, which doesn't really solve the problem. If anyone can help me figure out how to flip only the front-camera images, or suggest any other solution, that would be great!

import UIKit
import AVFoundation

@available(iOS 10.0, *)
class CameraViewController: UIViewController,AVCaptureVideoDataOutputSampleBufferDelegate {

let photoSettings = AVCapturePhotoSettings()
    var audioPlayer = AVAudioPlayer()
    var captureSession = AVCaptureSession()
    var videoDeviceInput: AVCaptureDeviceInput!
    var previewLayer = AVCaptureVideoPreviewLayer()
    var frontCamera: Bool = false
    var captureDevice:AVCaptureDevice!
    var takePhoto = false

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        } 
    }

    func frontCamera(_ front: Bool){
        let devices = AVCaptureDevice.devices()

        do{
            try captureSession.removeInput(AVCaptureDeviceInput(device:captureDevice!)) 
        }catch{
            print("Error")
        }

        for device in devices!{
            if((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
                if front{
                    if (device as AnyObject).position == AVCaptureDevicePosition.front {
                        captureDevice = device as? AVCaptureDevice

                        do{
                            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                        }catch{}
                        break
                    }
                }else{
                    if (device as AnyObject).position == AVCaptureDevicePosition.back {
                        captureDevice = device as? AVCaptureDevice

                        do{
                            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                        }catch{}
                        break
                    }
                }
            }
        }
    }

    func beginSession () {
        do {
            // Attach the currently selected camera to the session.
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)

            if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
                self.previewLayer = previewLayer
                containerView.layer.addSublayer(previewLayer)
                self.previewLayer.frame = self.view.layer.frame
                self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                captureSession.startRunning()

                let dataOutput = AVCaptureVideoDataOutput()
                dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
                dataOutput.alwaysDiscardsLateVideoFrames = true

                if captureSession.canAddOutput(dataOutput) {
                    captureSession.addOutput(dataOutput)

                    photoSettings.isHighResolutionPhotoEnabled = true
                    photoSettings.isAutoStillImageStabilizationEnabled = true
                }

                captureSession.commitConfiguration()

                let queue = DispatchQueue(label: "com.NightOut.captureQueue")
                dataOutput.setSampleBufferDelegate(self, queue: queue)
            }
        } catch {
            print(error.localizedDescription)
        }
    }
    @IBAction func takePhoto(_ sender: Any) {
        takePhoto = true

        photoSettings.isHighResolutionPhotoEnabled = true
        photoSettings.isAutoStillImageStabilizationEnabled = true
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if takePhoto {
            takePhoto = false
            if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
                let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController

                photoVC.takenPhoto = image

                DispatchQueue.main.async {
                    self.present(photoVC, animated: true, completion: {
                        self.stopCaptureSession()
                    })
                }
            }  
        }
    }

    func getImageFromSampleBuffer (buffer:CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()

            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
            }
        }
        return nil
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        self.captureSession.stopRunning()
    }

    func stopCaptureSession () {
        self.captureSession.stopRunning()

        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    @IBAction func DismissButtonAction(_ sender: UIButton) {

        UIView.animate(withDuration: 0.1, animations: {
            self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
        }, completion: { (finish) in
            UIView.animate(withDuration: 0.1, animations: {
                self.DismissButton.transform = CGAffineTransform.identity
            })
        })
        performSegue(withIdentifier: "Segue", sender: nil)
    }
}

Answer

I figured this out myself. Here is the solution:

if captureDevice.position == AVCaptureDevicePosition.back {
    if let image = context.createCGImage(ciImage, from: imageRect) {
        return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
    }
}
                
if captureDevice.position == AVCaptureDevicePosition.front {
    if let image = context.createCGImage(ciImage, from: imageRect) {
        return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
    }
}
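
For completeness, here is a sketch of how that position check can be folded back into the question's getImageFromSampleBuffer(buffer:), assuming captureDevice is kept up to date by frontCamera(_:) whenever the user switches cameras; the only new name is a local orientation constant introduced for illustration:

func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let context = CIContext()

        let imageRect = CGRect(x: 0, y: 0,
                               width: CVPixelBufferGetWidth(pixelBuffer),
                               height: CVPixelBufferGetHeight(pixelBuffer))

        if let image = context.createCGImage(ciImage, from: imageRect) {
            // Front-camera frames get the mirrored variant (.leftMirrored);
            // rear-camera frames are only rotated into portrait (.right).
            let orientation: UIImageOrientation =
                captureDevice.position == .front ? .leftMirrored : .right
            return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: orientation)
        }
    }
    return nil
}

An alternative worth exploring is to disable automaticallyAdjustsVideoMirroring on the video data output's AVCaptureConnection and set isVideoMirrored yourself, so the sample buffers already arrive with the mirroring you want; the orientation-based approach above keeps the handling in one place without touching the session configuration.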

