
Implementing a Custom Camera with Tap-to-Focus and Torch in Swift


Requirement: implement a custom camera in Swift that supports tap-to-focus and a torch (flashlight).

The key points are how to capture a sharp image and how to crop it afterwards. Whether you crop to a square or some other aspect ratio is up to you; adjust the crop rect to match your needs.

 

Main approach

1. Lay out the UI

2. Check camera permission

3. Start the capture session

4. Focus, toggle the torch, and take the shot you want

5. Crop the image

6. Hand the result back via a delegate callback and stop the session

7. Pop back, and that's it

Without further ado, here is the code.
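One note before the code: it relies on a few project-specific helpers that the post does not show — WIN_WIDTH, NAVH, STAH, Interval, C1, isIphoneX, fontMedium_size(_:), UIButton.buttonWith(...), the then { } configuration helper (from the Then library), a bottom accessor on UIView, and the DhViewController base class (which provides navItem and a custom navigationBar). The stand-ins below are my own guesses, meant only to make the snippet readable outside the original project; substitute your project's real definitions.

import UIKit

// Hypothetical stand-ins for the post's project-level helpers (assumptions, not the author's code)
let WIN_WIDTH = UIScreen.main.bounds.width            // screen width
let STAH: CGFloat = 44                                // status-bar height (assumed)
let NAVH: CGFloat = STAH + 44                         // status bar + navigation bar height (assumed)
let Interval: CGFloat = 15                            // common horizontal margin (assumed)
let C1 = UIColor.systemBlue                           // the project's theme color (assumed)
let isIphoneX = UIScreen.main.bounds.height >= 812    // rough notch-device check (assumed)

func fontMedium_size(_ size: CGFloat) -> UIFont {
    return UIFont.systemFont(ofSize: size, weight: .medium)
}

extension UIView {
    // Convenience accessor used as cameraPreviewView.bottom in the post
    var bottom: CGFloat { return frame.maxY }
}

extension UIButton {
    // Simplified guess at the button factory used in the post
    static func buttonWith(imageName: String, titleColor: UIColor, titleFont: UIFont,
                           backgroundColor: UIColor?, title: String) -> UIButton {
        let button = UIButton(type: .custom)
        if !imageName.isEmpty { button.setImage(UIImage(named: imageName), for: .normal) }
        button.setTitle(title, for: .normal)
        button.setTitleColor(titleColor, for: .normal)
        button.titleLabel?.font = titleFont
        button.backgroundColor = backgroundColor
        return button
    }
}

// Minimal replacement for the Then pod's then(_:) if you don't use that dependency
protocol ThenCompatible {}
extension ThenCompatible where Self: AnyObject {
    func then(_ configure: (Self) -> Void) -> Self {
        configure(self)
        return self
    }
}
extension NSObject: ThenCompatible {}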

import UIKit
import AVFoundation
// Delegate callback invoked after the photo is confirmed
protocol DhCameraVCDelegate: AnyObject {
    func didSelectedImage(_ image: UIImage)
}

class DhCameraVC: DhViewController, UIGestureRecognizerDelegate {

    weak var delegate: DhCameraVCDelegate?
    var guideImageView: UIImageView!
    var cameraPreviewView: UIView!
    var captureButton: UIButton!
    var focusView: UIView!
    var chongPaiButton: UIButton!
    var queDingButton: UIButton!
    var shouDianButton: UIButton!
    var istorchOn: Bool!
    var captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice!
    /// Set to true when the user taps the capture button
    var takePhoto = false
    
    ///
    override func viewDidLoad() {
        super.viewDidLoad()
        self.navItem.title = "自定义相机拍照"
        cameraPreviewView = UIView().then{
            $0.frame = CGRect(x:0, y: NAVH, width: WIN_WIDTH, height: WIN_WIDTH)
        }
        self.view.addSubview(cameraPreviewView)
        // Grid lines that split the preview into thirds
        let ww = (WIN_WIDTH - 2) / 3.0
        let linView0 = UIView().then{
            $0.backgroundColor = .white
            $0.frame = CGRect(x:ww, y: NAVH, width: 1, height: WIN_WIDTH)
        }
        self.view.addSubview(linView0)
        
        let linView1 = UIView().then{
            $0.backgroundColor = .white
            $0.frame = CGRect(x:ww * 2 + 1, y: NAVH, width: 1, height: WIN_WIDTH)
        }
        self.view.addSubview(linView1)
        
        let linView2 = UIView().then{
            $0.backgroundColor = .white
            $0.frame = CGRect(x:0, y: NAVH + ww, width: WIN_WIDTH, height: 1)
        }
        self.view.addSubview(linView2)
        
        let linView3 = UIView().then{
            $0.backgroundColor = .white
            $0.frame = CGRect(x:0, y:NAVH + ww * 2 + 1, width: WIN_WIDTH, height: 1)
        }
        self.view.addSubview(linView3)
        
        // Preview image view that shows the captured (cropped) frame
        guideImageView = UIImageView().then{
            $0.frame = CGRect(x:0, y: NAVH, width: WIN_WIDTH, height: WIN_WIDTH)
        }
        self.view.addSubview(guideImageView)
        
        // Capture button
        captureButton = UIButton.buttonWith(imageName:"", titleColor: .white, titleFont: fontMedium_size(16), backgroundColor: C1, title: "拍照")
        captureButton.frame = CGRect(x: WIN_WIDTH * 0.5 - 25, y: cameraPreviewView.bottom + 2 * NAVH, width: 50, height: 40)
        self.view.addSubview(captureButton)
        captureButton.addTarget(self, action: #selector(didTapClick), for: .touchUpInside)
        captureButton.layer.cornerRadius = 2
        
        // Retake button
        chongPaiButton = UIButton.buttonWith(imageName:"", titleColor: .white, titleFont: fontMedium_size(16), backgroundColor: C1, title: "重拍")
        chongPaiButton.frame = CGRect(x: Interval, y: cameraPreviewView.bottom + 2 * NAVH, width: 50, height: 40)
        self.view.addSubview(chongPaiButton)
        chongPaiButton.addTarget(self, action: #selector(chongPaiButtonClick), for: .touchUpInside)
        chongPaiButton.layer.cornerRadius = 2
        
        // Confirm button
        queDingButton = UIButton.buttonWith(imageName:"", titleColor: .white, titleFont: fontMedium_size(16), backgroundColor: C1, title: "确定")
        queDingButton.frame = CGRect(x: WIN_WIDTH - 50 - Interval, y: cameraPreviewView.bottom + 2 * NAVH, width: 50, height: 40)
        self.view.addSubview(queDingButton)
        queDingButton.layer.cornerRadius = 2
        queDingButton.addTarget(self, action: #selector(queDingButtonClick), for: .touchUpInside)
        
        // Torch toggle, placed at the top right of the navigation bar
        shouDianButton = UIButton.buttonWith(imageName:"", titleColor: C1, titleFont: fontMedium_size(16), backgroundColor: nil, title: "手电开")
        shouDianButton.frame = CGRect(x: WIN_WIDTH - 70 - Interval, y: STAH, width: 70, height: 40)
        self.navigationBar.addSubview(shouDianButton)
        shouDianButton.addTarget(self, action: #selector(shouDianButtonClick), for: .touchUpInside)
        
        captureSession = AVCaptureSession()
        previewLayer = CALayer()
        takePhoto = false

        requestAuthorization() // Request authorization; start the camera if granted
        
    }
    
    /// Requests camera authorization and starts the camera if it is granted.
    private func requestAuthorization() {
        switch AVCaptureDevice.authorizationStatus(for: AVMediaType.video) {
        case .authorized:
            prepareCamera()

        case .denied, .restricted, .notDetermined:
            AVCaptureDevice.requestAccess(for: AVMediaType.video, completionHandler: { (granted) in
                if !Thread.isMainThread {
                    DispatchQueue.main.async {
                        if granted {
                            self.prepareCamera()
                        } else {
                            self.showSetPrivacyAlert()
                        }
                    }
                } else {
                    if granted {
                        self.prepareCamera()
                    } else {
                        self.showSetPrivacyAlert()
                    }
                }
            })

        @unknown default:
            break
        }
    }
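    // Note (added): the app's Info.plist must contain an NSCameraUsageDescription entry;
    // without it, iOS terminates the app the first time the camera is accessed.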
    
    // Alert directing the user to Settings when camera access has been denied
    func showSetPrivacyAlert() {
        let alert = UIAlertController(title: "无法进入相机", message: "请去设置隐私并允许此应用程序访问相机", preferredStyle: UIAlertController.Style.alert)
        alert.addAction(UIAlertAction(title: "好的", style: .default, handler: {_ in
            self.navigationController?.popToRootViewController(animated: true)
        }))
        self.present(alert, animated: true, completion: nil)
    }
    
    // Checks whether the back camera is available and, if so, passes the device on to assignCamera
    private func prepareCamera() {
        // Use the photo preset for full-resolution stills
        self.captureSession.sessionPreset = AVCaptureSession.Preset.photo

        if #available(iOS 10.0, *) {
            let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices
            self.assignCamera(availableDevices)
        } else {
           
            if let availableDevices = AVCaptureDevice.default(for: AVMediaType.video) {
                self.assignCamera([availableDevices])
            } else {
                self.showAlert()
            }
        }
    }
    
    // Alert shown when no usable camera is found
    func showAlert() {
        let alert = UIAlertController(title: "无法进入相机", message: "看来你的设备要么没有摄像头要么坏了", preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "取消", style: .cancel, handler: {_ in
            self.navigationController?.dismiss(animated: true, completion: nil)
        }))
        self.present(alert, animated: true, completion: nil)
    }
    
    /// Assigns the first available AVCaptureDevice and starts the session.
    /// - Parameter availableDevices: [AVCaptureDevice]
    private func assignCamera(_ availableDevices: [AVCaptureDevice]) {
        if let device = availableDevices.first {
            captureDevice = device
            beginSession()
        } else {
            self.showAlert()
        }
    }
    
    // Configures the camera input/output, starts the session, and sets up the preview, tap-to-focus and torch state
    private func beginSession() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }
        } catch {
            print(error.localizedDescription)
        }
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.previewLayer = previewLayer
        self.previewLayer.frame = self.cameraPreviewView.frame
        self.previewLayer.frame.origin.y = 0
        (self.previewLayer as! AVCaptureVideoPreviewLayer).videoGravity = AVLayerVideoGravity.resizeAspectFill
        self.previewLayer.masksToBounds = true
        self.cameraPreviewView.clipsToBounds = true
        self.cameraPreviewView.layer.addSublayer(self.previewLayer)
        captureSession.beginConfiguration()

        let dataOutput = AVCaptureVideoDataOutput()
        // Ask for BGRA frames so they convert cleanly to CGImage/UIImage later
        dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: kCVPixelFormatType_32BGRA)]
        dataOutput.alwaysDiscardsLateVideoFrames = true

        if captureSession.canAddOutput(dataOutput) {
            captureSession.addOutput(dataOutput)
        }

        // beginConfiguration/commitConfiguration must be balanced
        captureSession.commitConfiguration()
        captureSession.startRunning()

        let queue = DispatchQueue(label: "com.letsappit.camera")
        dataOutput.setSampleBufferDelegate(self, queue: queue)

        self.userinteractionToButton(true)
        
        // Focus indicator view shown where the user taps
        focusView = UIView(frame: CGRect(x: WIN_WIDTH * 0.5, y: NAVH + WIN_WIDTH * 0.5, width: 60, height: 60))
        focusView?.layer.borderWidth = 1
        focusView?.layer.borderColor = C1.cgColor
        if let focusView = focusView {
            self.cameraPreviewView.addSubview(focusView)
        }
        focusView?.isHidden = true
        
        let tapGesture = UITapGestureRecognizer(target: self, action: #selector(focusGesture(_:)))
        tapGesture.delegate = self
        self.cameraPreviewView.addGestureRecognizer(tapGesture)
        
        // Torch starts off
        istorchOn = false
    }
    
    // Tap-to-focus gesture handler
    @objc func focusGesture(_ gesture: UITapGestureRecognizer?) {
        guard let gesture = gesture, let view = gesture.view else { return }
        let point = gesture.location(in: view)
        tofocusPoint(point: point)
    }
    
    // Focuses and exposes at the tapped point
    func tofocusPoint(point: CGPoint) {
        do {
            // AVFoundation expects a point of interest normalized to (0,1)x(0,1) in the
            // landscape sensor coordinate space, hence the x/y swap for a portrait preview
            let focusPoint = CGPoint(x: point.y / WIN_WIDTH, y: 1 - point.x / WIN_WIDTH)
            try captureDevice.lockForConfiguration()

            if captureDevice.isFocusModeSupported(.autoFocus) {
                captureDevice.focusPointOfInterest = focusPoint
                captureDevice.focusMode = .autoFocus
            }

            if captureDevice.isExposureModeSupported(.autoExpose) {
                captureDevice.exposurePointOfInterest = focusPoint
                // Adjust exposure at the same point
                captureDevice.exposureMode = .autoExpose
            }
            captureDevice.unlockForConfiguration()
        } catch {
            print(error.localizedDescription)
        }
        focusView.center = point
        focusView.isHidden = false
        UIView.animate(withDuration: 0.3, animations: { [weak self] in
            self?.focusView.transform = CGAffineTransform(scaleX: 1.25, y: 1.25)
        }) { [weak self] _ in
            UIView.animate(withDuration: 0.5, animations: {
                self?.focusView.transform = .identity
            }) { _ in
                self?.focusView.isHidden = true
            }
        }
    }
    
    // Torch toggle
    @objc func shouDianButtonClick(_ sender: Any) {
        // Bail out if the camera isn't set up yet or the device has no torch
        guard captureDevice != nil, captureDevice.hasTorch else { return }

        do {
            try captureDevice.lockForConfiguration()

            if istorchOn {

                if captureDevice.isTorchModeSupported(.off) {
                    captureDevice.torchMode = .off
                    istorchOn = false
                    shouDianButton.setTitle("手电开", for: .normal)
                }
            } else {
                if captureDevice.isTorchModeSupported(.on) {
                    captureDevice.torchMode = .on
                    istorchOn = true
                    shouDianButton.setTitle("手电关", for: .normal)
                }
            }

            captureDevice.unlockForConfiguration()
        } catch {
            print(error.localizedDescription)
        }
    }
    
    /// Builds a UIImage from the given CMSampleBuffer.
    /// - Parameter buffer: CMSampleBuffer delivered by the video data output
    /// - Returns: UIImage?, or nil if the buffer has no pixel data
    func getImageFromSampleBuffer(buffer: CMSampleBuffer, orientation: UIImage.Orientation) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()
            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: orientation)
            }
        }
        return nil
    }

    // Stops the session and removes its inputs
    func stopCaptureSession() {
        self.captureSession.stopRunning()

        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }

    // Capture button tapped: flag the next video frame to be kept
    @objc func didTapClick(_ sender: Any) {
        userinteractionToButton(false)
        takePhoto = true
    }
    
    private func userinteractionToButton(_ interaction: Bool) {
        captureButton.isEnabled = interaction
    }

    // Retake: clear the preview and allow capturing again
    @objc func chongPaiButtonClick(_ sender: Any) {
        userinteractionToButton(true)
        takePhoto = false
        self.guideImageView.image = nil
    }
    
    // Confirm: hand the image back to the delegate and pop
    @objc func queDingButtonClick(_ sender: Any) {
        // Do nothing if no photo has been taken yet
        guard let data = self.guideImageView.image?.pngData(),
              let newImage = UIImage(data: data) else { return }
        self.stopCaptureSession()
        self.previewLayer.removeFromSuperlayer()
        delegate?.didSelectedImage(newImage)
        navigationController?.popViewController(animated: true)
    }
    
    // Log deallocation to make sure the controller is not leaked
    deinit {
        print("DhCameraVC-deinit")
    }
}

extension DhCameraVC: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        if connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
        }

        if takePhoto {
            takePhoto = false

            // The device's orientation lock must be off for UIDevice.current.orientation to be useful;
            // map it to an image orientation so the captured photo comes out upright
            var orientation = UIImage.Orientation.up
            switch UIDevice.current.orientation {
            case .landscapeLeft:
                orientation = .left

            case .landscapeRight:
                orientation = .right

            case .portraitUpsideDown:
                orientation = .down

            default:
                orientation = .up
            }

            if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer, orientation: orientation) {
                DispatchQueue.main.async {
                    if isIphoneX {
                        // imageByCropToRect(rect:scale:) is a UIImage extension (shown below) used to crop the desired region
                        let newImage = image.imageByCropToRect(rect:CGRect(x: 0, y:self.cameraPreviewView.frame.origin.y - 24, width: self.cameraPreviewView.frame.size.width, height: self.cameraPreviewView.frame.size.height) , scale: true)
                        self.guideImageView.image = newImage
                    }else {
                        let newImage = image.imageByCropToRect(rect: self.cameraPreviewView.frame, scale: true)
                        self.guideImageView.image = newImage
                    }
                }
            }
        }
    }
}
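For completeness, here is how a caller might use DhCameraVC. This is my own sketch, not part of the original post; ScanViewController is a hypothetical host controller inside a UINavigationController.

class ScanViewController: DhViewController, DhCameraVCDelegate {

    // Push the custom camera, e.g. from a button action
    @objc func openCamera() {
        let cameraVC = DhCameraVC()
        cameraVC.delegate = self
        navigationController?.pushViewController(cameraVC, animated: true)
    }

    // Called after the user taps the confirm button in DhCameraVC
    func didSelectedImage(_ image: UIImage) {
        // Use the cropped image: show it, upload it, run recognition on it, etc.
        print("Received image of size \(image.size)")
    }
}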

Cropping with a UIImage extension

The UIImage extension used to crop the image is shown below. If you already have a UIImage extension in your project, just drop this func into it.

extension UIImage {
    /// Crops the image to the given rect (in points). Pass scale = true to convert the rect
    /// into pixel coordinates using the image's scale factor.
    func imageByCropToRect(rect: CGRect, scale: Bool) -> UIImage {
        var rect = rect
        var scaleFactor: CGFloat = 1.0
        if scale {
            scaleFactor = self.scale
            rect.origin.x *= scaleFactor
            rect.origin.y *= scaleFactor
            rect.size.width *= scaleFactor
            rect.size.height *= scaleFactor
        }

        // Fall back to the original image if the rect is empty or cropping fails
        guard rect.size.width > 0, rect.size.height > 0,
              let cgImage = self.cgImage,
              let croppedRef = cgImage.cropping(to: rect) else {
            return self
        }
        return UIImage(cgImage: croppedRef, scale: scaleFactor, orientation: self.imageOrientation)
    }
}
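As mentioned at the beginning, the crop does not have to be square. As a small illustration of my own (not from the post), this helper crops a centered 4:3 region instead:

// Example: keep a centered region with a 4:3 width-to-height ratio
func cropToFourByThree(_ source: UIImage) -> UIImage {
    let targetHeight = source.size.width * 3.0 / 4.0
    let cropRect = CGRect(x: 0,
                          y: (source.size.height - targetHeight) / 2.0,
                          width: source.size.width,
                          height: targetHeight)
    return source.imageByCropToRect(rect: cropRect, scale: true)
}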

That's a wrap!

 

Source: https://www.cnblogs.com/ljcgood66/p/13949650.html