0

In my application there is a facility for capturing photos from both the front and rear cameras. Both cameras work properly on iOS 10.2.1 and earlier, but when I run the application on iOS 10.3.2, capturing a photo with the front camera works perfectly, while capturing with the rear (back) camera crashes the application with the error "Lost connection to X's iPhone". I googled and found that this issue occurs when the image size is large and that it is a memory issue. I tried to resolve it by resizing the photo, but the crash persists. Would you please point out what I am missing?

Note: capturing a photo is also noticeably slower than the camera's usual operation.

enter image description here

Source code

/// Thin wrapper around an AVCaptureSession for still-photo capture.
/// Configures the session on a private serial queue and reports lifecycle
/// events to a `CameraLibraryDelegate`.
/// NOTE(review): class body continues beyond this excerpt; the closing brace
/// is not visible here.
class CameraLibrary: NSObject {

// Receiver of session lifecycle callbacks; weak to avoid a retain cycle
// with the owning view controller.
weak var delegate: CameraLibraryDelegate?

// The capture session; created in initializeSession(). Implicitly unwrapped:
// nil only before initializeSession() runs.
var session: AVCaptureSession!

// Serial queue on which all session configuration and start/stop runs,
// keeping that work off the main thread.
var sessionQueue: DispatchQueue!
// JPEG still-image output; nil until addStillImageOutput() attaches it.
// (AVCaptureStillImageOutput is deprecated in iOS 10 — presumably this
// targets pre-AVCapturePhotoOutput code; consider migrating.)
var stillImageOutput: AVCaptureStillImageOutput?
/// Creates the library, wiring `sender` up as the delegate (if it conforms
/// to `CameraLibraryDelegate`), then registers observers and kicks off
/// asynchronous session configuration.
init(sender: AnyObject) {
    super.init()
    delegate = sender as? CameraLibraryDelegate
    setObservers()
    initializeSession()
}

deinit {
    // Balance the NotificationCenter registrations made in setObservers().
    removeObservers()
}

// MARK: Session

/// Builds the capture session (photo preset), then performs the expensive
/// input/output configuration asynchronously on a dedicated serial queue so
/// the main thread is never blocked. Notifies the delegate on completion.
func initializeSession() {
    session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetPhoto
    sessionQueue = DispatchQueue(label: "camera session", attributes: [])

    sessionQueue.async {
        // Batch the input/output changes into one atomic configuration.
        self.session.beginConfiguration()
        self.addVideoInput()
        self.addStillImageOutput()
        self.session.commitConfiguration()

        // Delegate callbacks happen on the main thread.
        DispatchQueue.main.async {
            NSLog("Session initialization did complete")
            self.delegate?.cameraSessionConfigurationDidComplete()
        }
    }
}

/// Starts the session off the main thread — startRunning() blocks until
/// capture is actually up.
func startCamera() {
    sessionQueue.async { self.session.startRunning() }
}

/// Stops the session on the session queue, mirroring startCamera().
func stopCamera() {
    sessionQueue.async { self.session.stopRunning() }
}

/// Captures a still image from the session's video connection.
///
/// - Parameter completed: Called exactly once, on the MAIN queue, with the
///   captured image, or `nil` if no output/connection exists or capture failed.
///
/// Fixes vs. the original:
/// 1. `jpegStillImageNSDataRepresentation(_:)` can return nil (and the sample
///    buffer can be nil on error); the original force-unwrapped both, which
///    crashes the app — the likely cause of the reported rear-camera crash.
/// 2. The original invoked `completed` on the session queue (the main-queue
///    dispatch was commented out); callers typically touch UIKit, which must
///    happen on the main thread.
func captureStillImage(_ completed: @escaping (_ image: UIImage?) -> Void) {
    guard let imageOutput = self.stillImageOutput else {
        // Output was never attached; nothing to capture.
        completed(nil)
        return
    }

    sessionQueue.async {
        // Locate the connection that carries video; nil when the session
        // has no camera input attached.
        let videoConnection = imageOutput.connections
            .flatMap { $0 as? AVCaptureConnection }
            .first { connection in
                connection.inputPorts.contains { port in
                    (port as? AVCaptureInputPort)?.mediaType == AVMediaTypeVideo
                }
            }

        guard let connection = videoConnection else {
            DispatchQueue.main.async { completed(nil) }
            return
        }

        imageOutput.captureStillImageAsynchronously(from: connection) { sampleBuffer, error in
            // Fail soft: any nil along the way yields a nil image instead of
            // a force-unwrap crash.
            var image: UIImage?
            if error == nil,
               let buffer = sampleBuffer,
               let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) {
                image = UIImage(data: data)
            }
            DispatchQueue.main.async {
                completed(image)
            }
        }
    }
}
/// Checks camera authorization, prompting the user if needed. Posts a
/// "CameraAccessDenied" notification when access is refused so the UI can
/// react. (Name typo preserved — it is part of the public interface.)
func checkCameraPermisson() -> Void {
    let status = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)
    if status == AVAuthorizationStatus.authorized {
        print("Already Authorized")
        return
    }

    AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { granted in
        if granted {
            print("User granted")
        } else {
            // NOTE: the completion handler may run on an arbitrary queue.
            NotificationCenter.default.post(name: Notification.Name(rawValue: "CameraAccessDenied"), object: nil)
        }
    }
}

// MARK: Configuration

/// Ensures the session has a camera input.
///
/// BUG in the original: the loop removed and immediately re-added each
/// input that was ALREADY attached (a net no-op) and never created a new
/// `AVCaptureDeviceInput` — so on a freshly created session no camera was
/// ever attached here. The existing-inputs path is preserved unchanged for
/// backward compatibility; the fix only adds the back camera when the
/// session has no inputs at all.
func addVideoInput() {

    if let inputs = session.inputs as? [AVCaptureDeviceInput], !inputs.isEmpty {
        // Preserved original behavior: remove + re-add (effectively a no-op).
        // NOTE(review): if camera switching is handled elsewhere, this branch
        // intentionally does not interfere with it.
        for input in inputs {
            session.removeInput(input)
            session.addInput(input)
        }
        return
    }

    // No input attached yet: default to the back camera.
    let device = deviceWithMediaTypeWithPosition(AVMediaTypeVideo as NSString, position: .back)
    do {
        let videoInput = try AVCaptureDeviceInput(device: device)
        if session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }
    } catch {
        NSLog("addVideoInput failed to create device input: \(error)")
    }
}

/// Creates a JPEG still-image output, stores it, and attaches it to the
/// session when possible.
func addStillImageOutput() {
    // Work with a non-optional local to avoid optional juggling.
    let output = AVCaptureStillImageOutput()
    output.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    stillImageOutput = output

    if session.canAddOutput(output) {
        session.addOutput(output)
    }
}


/// Returns the first capture device of `mediaType` at `position`, falling
/// back to the first available device of that media type.
///
/// Fix vs. the original: `devices.firstObject as! AVCaptureDevice` crashed
/// with an opaque cast failure when the device list was empty (e.g. in the
/// Simulator, or when the camera is restricted). The non-optional return
/// type is preserved, so an empty list still traps — but now with an
/// explicit, diagnosable message.
func deviceWithMediaTypeWithPosition(_ mediaType: NSString, position: AVCaptureDevicePosition) -> AVCaptureDevice {
    let devices = (AVCaptureDevice.devices(withMediaType: mediaType as String) as? [AVCaptureDevice]) ?? []
    guard let fallback = devices.first else {
        fatalError("No AVCaptureDevice available for media type \(mediaType)")
    }
    return devices.first { $0.position == position } ?? fallback
}

// MARK: Observers

/// Subscribes to session start/stop notifications; removeObservers() must be
/// called (it is, from deinit) to balance these registrations.
func setObservers() {
    let center = NotificationCenter.default
    center.addObserver(self,
                       selector: #selector(CameraLibrary.sessionDidStart(_:)),
                       name: NSNotification.Name.AVCaptureSessionDidStartRunning,
                       object: nil)
    center.addObserver(self,
                       selector: #selector(CameraLibrary.sessionDidStop(_:)),
                       name: NSNotification.Name.AVCaptureSessionDidStopRunning,
                       object: nil)
}

/// Deregisters this object from every notification it subscribed to.
func removeObservers() {
    let center = NotificationCenter.default
    center.removeObserver(self)
}

/// AVCaptureSessionDidStartRunning handler. The notification may arrive on
/// an arbitrary thread, so the delegate callback is hopped to the main queue.
func sessionDidStart(_ notification: Notification) {
    let mainQueue = DispatchQueue.main
    mainQueue.async {
        NSLog("Session did start")
        self.delegate?.cameraSessionDidBegin()
    }
}

/// AVCaptureSessionDidStopRunning handler; mirrors sessionDidStart(_:) and
/// forwards to the delegate on the main queue.
func sessionDidStop(_ notification: Notification) {
    let mainQueue = DispatchQueue.main
    mainQueue.async {
        NSLog("Session did stop")
        self.delegate?.cameraSessionDidStop()
    }
}
Shashi Ranjan
  • 154
  • 2
  • 12

0 Answers0