//
// CanliKameraileNesneViewController.swift
// ML Ornekleri
//
// Created by Bülent Siyah on 17.06.2018.
// Copyright © 2018 Bülent Siyah. All rights reserved.
//

import UIKit
import AVFoundation
import Vision

class CanliKameraileNesneViewController: UIViewController {

    @IBOutlet weak var previView2: PreviewView!
    @IBOutlet weak var objectTextView2: UITextView!

    // Live Camera Properties
    let captureSession = AVCaptureSession()
    var captureDevice: AVCaptureDevice!
    var devicePosition: AVCaptureDevice.Position = .back
    var requests = [VNRequest]()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupVision()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Starts the live camera feed; this file as shown does not define
        // prepareCamera(), so a minimal sketch is given below the class.
        prepareCamera()
    }

    func setupVision() {
        // Rectangle request: report at most one rectangle covering at least
        // 10% of the smaller image dimension.
        let rectangleDetectionRequest = VNDetectRectanglesRequest(completionHandler: handleRectangles)
        rectangleDetectionRequest.minimumSize = 0.1
        rectangleDetectionRequest.maximumObservations = 1

        // Object classification with the bundled Inceptionv3 Core ML model.
        guard let visionModel = try? VNCoreMLModel(for: Inceptionv3().model) else {
            fatalError("Can't load Vision ML model")
        }
        let classificationRequest = VNCoreMLRequest(model: visionModel, completionHandler: handleClassification)
        classificationRequest.imageCropAndScaleOption = .centerCrop

        self.requests = [rectangleDetectionRequest, classificationRequest]
    }

    func handleRectangles(request: VNRequest, error: Error?) {
        // Unwrap safely instead of force-casting; results can be nil or another type.
        guard let results = request.results as? [VNRectangleObservation] else { return }
        DispatchQueue.main.async {
            self.drawVisionRequestResults(results)
        }
    }

    func drawVisionRequestResults(_ results: [VNRectangleObservation]) {
        previView2.removeMask()
        // Vision bounding boxes are normalized with a bottom-left origin: scale them
        // to the preview's size, then flip vertically into UIKit's coordinate space.
        let scale = CGAffineTransform.identity.scaledBy(x: self.previView2.frame.width, y: self.previView2.frame.height)
        let flip = CGAffineTransform(scaleX: 1, y: -1).translatedBy(x: 0, y: -self.previView2.frame.height)
        for rectangle in results {
            let rectangleBounds = rectangle.boundingBox.applying(scale).applying(flip)
            previView2.drawLayer(in: rectangleBounds)
        }
    }

    func handleClassification(request: VNRequest, error: Error?) {
        guard let observations = request.results else {
            print("no results: \(String(describing: error?.localizedDescription))")
            return
        }
        // Take up to the top five results (prefix avoids an out-of-bounds crash when
        // fewer arrive), keep reasonably confident classifications, and show their labels.
        let classifications = observations.prefix(5)
            .compactMap { $0 as? VNClassificationObservation }
            .filter { $0.confidence > 0.3 }
            .map { $0.identifier }
        DispatchQueue.main.async {
            self.objectTextView2.text = classifications.joined(separator: "\n")
        }
    }
}
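
// prepareCamera() is called in viewWillAppear but isn't defined in this file as
// shown. The sketch below is a minimal, assumed reconstruction of the usual
// AVFoundation session setup; in particular it presumes that PreviewView exposes
// its AVCaptureVideoPreviewLayer as `videoPreviewLayer`, which is an assumption
// about this project's PreviewView API, not confirmed by the source.
extension CanliKameraileNesneViewController {

    func prepareCamera() {
        captureSession.sessionPreset = .high

        // Pick the camera matching devicePosition (back by default).
        guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: devicePosition),
              let input = try? AVCaptureDeviceInput(device: device) else {
            print("Unable to access the camera")
            return
        }
        captureDevice = device
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }

        // Deliver frames to the sample-buffer delegate below on a background queue.
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        // Assumption: PreviewView wraps an AVCaptureVideoPreviewLayer.
        previView2.videoPreviewLayer.session = captureSession
        captureSession.startRunning()
    }
}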
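
// Without a sample-buffer delegate, the requests built in setupVision() are never
// performed. This is a sketch of the standard pattern for feeding each captured
// frame to Vision; the project's original implementation may differ.
extension CanliKameraileNesneViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        // Run the rectangle-detection and classification requests on this frame.
        let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
        do {
            try handler.perform(self.requests)
        } catch {
            print(error)
        }
    }
}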