skip to Main Content

I am trying to integrate AVFoundation in my flutter project to scan QR code. I have followed the docs and wrote the following code

class CealScanQrView extends StatelessWidget {
  const CealScanQrView({super.key});

  @override
  Widget build(BuildContext context) {
    final Map<String, dynamic> creationParams = <String, dynamic>{};
    return Platform.isAndroid
        ? AndroidView(
            viewType: cealScanQrView,
            layoutDirection: TextDirection.ltr,
            creationParams: creationParams,
            creationParamsCodec: const StandardMessageCodec(),
          )
        : UiKitView(
            viewType: cealScanQrView,
            layoutDirection: TextDirection.ltr,
            creationParams: creationParams,
            creationParamsCodec: const StandardMessageCodec(),
          );
  }
}

I have created CealScanViewNativeViewFactory, CealScanViewNativeView and registered in my AppDelegate using below code

weak var registrar = self.registrar(forPlugin: "ceal-views")
        let cealQrViewfactory = CealQrViewNativeViewFactory(messenger: registrar!.messenger())
        let viewRegistrar = self.registrar(forPlugin: "<ceal-views>")!
        viewRegistrar.register(
            cealQrViewfactory,
            withId: "cealQrView")

Below is my CealScanViewNativeView code

import Foundation
import UIKit
import AVFoundation

class CealScanViewNativeView: NSObject, FlutterPlatformView,AVCaptureMetadataOutputObjectsDelegate {
    
    private var _view: UIView
    
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!
    
    init(
        frame: CGRect,
        viewIdentifier viewId: Int64,
        arguments args: Any?,
        binaryMessenger messenger: FlutterBinaryMessenger?
    ) {
        _view = UIView()
        super.init()
        setUpView()
    }
    
    func view() -> UIView {
        return _view
    }
    
    private func setUpView(){
        
        _view.backgroundColor = UIColor.clear
        
        captureSession = AVCaptureSession()

        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        let metadataOutput = AVCaptureMetadataOutput()

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)

            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr]
        } else {
            failed()
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = _view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        _view.layer.addSublayer(previewLayer)

        self.captureSession.startRunning()
        
    }
    
    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Device does not support scanning", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "Ok", style: .default))
        UIApplication.shared.connectedScenes.flatMap { ($0 as? UIWindowScene)?.windows ?? [] }
            .first { $0.isKeyWindow }?.rootViewController?.present(ac, animated: true)
        captureSession = nil
    }
    
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()
        
        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: stringValue)
        }
        UIApplication.shared.connectedScenes.flatMap { ($0 as? UIWindowScene)?.windows ?? [] }
            .first { $0.isKeyWindow }?.rootViewController?.dismiss(animated: true)
    }
    
    func found(code: String) {
        debugPrint("Code is (code)")
    }
    
}

I have given the camera permission as well but as soon as I open the view, I don’t see the camera. I tried changing the _view.backgroundColor to red and it is visible so the view is setup correctly but I don’t get the camera. In my Xcode logs I see below warning

Thread Performance Checker: -[AVCaptureSession startRunning] should be called from background thread. Calling it on the main thread can lead to UI unresponsiveness
PID: 1107, TID: 125465

3

Answers


  1. Chosen as BEST ANSWER

    Instead of using

    previewLayer.frame = _view.layer.bounds
    

    use

    previewLayer.frame = CGRect(x: 0, y: 0,width: 200,height: 200)
    

    Change width and height according to your needs


  2. I don’t see where you are adding the view holding the preview layer to the hierarchy. Make sure you add it too.

    There are a couple of things that you can do to make this work.

    1. Session configuration takes time and it needs to be done on a separate serial queue, like so:
        private let sessionQueue = DispatchQueue(label: "session queue")
        ...
        sessionQueue.async {
            self.setUpView()
        }
    
    1. Make variables class properties, like so:
        private let metadataOutput = AVCaptureMetadataOutput()
        private let metadataObjectsQueue = DispatchQueue(label: "metadata objects queue", attributes: [], target: nil)
    
    1. Process output on a separate queue
        metadataOutput.setMetadataObjectsDelegate(self, queue: metadataObjectsQueue)
        metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes
    
    1. Start session on the session queue
    sessionQueue.async {
        if self.isSessionRunning {
            self.session.startRunning()
            self.isSessionRunning = self.session.isRunning
        }
    }
    

    You can find complete sample at https://developer.apple.com/documentation/avfoundation/capture_setup/avcambarcode_detecting_barcodes_and_faces

    Login or Signup to reply.
  3. If the purpose is scanning qr code, why are you not using

    https://pub.dev/packages/qr_code_scanner

    Login or Signup to reply.
Please signup or login to give your own answer.
Back To Top
Search