How to use computer vision in an iOS app

If you want to tinker on your own and learn along the way, I suggest following this article I found on Medium. The approach uses native Swift code to extract camera frames, which you can then process with your own code.

    import UIKit
    import AVFoundation

    protocol FrameExtractorDelegate: AnyObject {
        func captured(image: UIImage)
    }

    class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

        private let position = AVCaptureDevice.Position.front
        private let quality = AVCaptureSession.Preset.medium

        private var permissionGranted = false
        private let sessionQueue = DispatchQueue(label: "session queue")
        private let captureSession = AVCaptureSession()
        private let context = CIContext()

        weak var delegate: FrameExtractorDelegate?

        override init() {
            super.init()
            checkPermission()
            sessionQueue.async { [unowned self] in
                self.configureSession()
                self.captureSession.startRunning()
            }
        }

        // MARK: AVSession configuration

        private func checkPermission() {
            switch AVCaptureDevice.authorizationStatus(for: .video) {
            case .authorized:
                permissionGranted = true
            case .notDetermined:
                requestPermission()
            default:
                permissionGranted = false
            }
        }

        private func requestPermission() {
            // Suspend the session queue until the user answers the permission prompt.
            sessionQueue.suspend()
            AVCaptureDevice.requestAccess(for: .video) { [unowned self] granted in
                self.permissionGranted = granted
                self.sessionQueue.resume()
            }
        }

        private func configureSession() {
            guard permissionGranted else { return }
            captureSession.sessionPreset = quality
            guard let captureDevice = selectCaptureDevice() else { return }
            guard let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice) else { return }
            guard captureSession.canAddInput(captureDeviceInput) else { return }
            captureSession.addInput(captureDeviceInput)
            let videoOutput = AVCaptureVideoDataOutput()
            videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer"))
            guard captureSession.canAddOutput(videoOutput) else { return }
            captureSession.addOutput(videoOutput)
            guard let connection = videoOutput.connection(with: .video) else { return }
            guard connection.isVideoOrientationSupported else { return }
            guard connection.isVideoMirroringSupported else { return }
            connection.videoOrientation = .portrait
            connection.isVideoMirrored = position == .front
        }

        private func selectCaptureDevice() -> AVCaptureDevice? {
            // Pick the built-in wide-angle camera on the requested side of the device.
            return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)
        }

        // MARK: Sample buffer to UIImage conversion

        private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
            guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
            let ciImage = CIImage(cvPixelBuffer: imageBuffer)
            guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
            return UIImage(cgImage: cgImage)
        }

        // MARK: AVCaptureVideoDataOutputSampleBufferDelegate

        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            guard let uiImage = imageFromSampleBuffer(sampleBuffer: sampleBuffer) else { return }
            // Deliver frames on the main thread, since the delegate will touch the UI.
            DispatchQueue.main.async { [unowned self] in
                self.delegate?.captured(image: uiImage)
            }
        }
    }
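
To consume the frames, a view controller only needs to own a FrameExtractor and adopt its delegate protocol. Here is a minimal sketch, assuming a full-screen UIImageView outlet (the imageView name is a placeholder). Remember to add an NSCameraUsageDescription entry to your Info.plist, or iOS will terminate the app the first time the camera is accessed.

    import UIKit

    class ViewController: UIViewController, FrameExtractorDelegate {

        // Placeholder outlet: an image view that fills the screen.
        @IBOutlet weak var imageView: UIImageView!

        private var frameExtractor: FrameExtractor!

        override func viewDidLoad() {
            super.viewDidLoad()
            frameExtractor = FrameExtractor()
            frameExtractor.delegate = self
        }

        // Called on the main thread for every captured frame.
        func captured(image: UIImage) {
            imageView.image = image
        }
    }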

iOS — Camera Frames Extraction – iOS App Development – Medium
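
As for the "process that frame with your own code" part, one option is Apple's Vision framework (iOS 11 and later). The sketch below is a hypothetical helper, not part of the original article: you could call detectFaces(in:) from captured(image:), ideally off the main thread for anything heavier than this.

    import UIKit
    import Vision

    // Hypothetical helper: detect face bounding boxes in a frame from FrameExtractor.
    func detectFaces(in image: UIImage) {
        guard let cgImage = image.cgImage else { return }
        let request = VNDetectFaceRectanglesRequest { request, _ in
            guard let faces = request.results as? [VNFaceObservation] else { return }
            // Bounding boxes are normalized to 0...1, with the origin at the bottom left.
            for face in faces {
                print("Face at \(face.boundingBox)")
            }
        }
        let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
        try? handler.perform([request])
    }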