From 1e55aadb57218de9f96b4a33830c02eebd3682fd Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Mon, 7 Dec 2020 21:08:47 -1000 Subject: [PATCH 1/9] Fixed image not found bug --- Lobe_iOS/ViewModels/PlayViewModel.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/Lobe_iOS/ViewModels/PlayViewModel.swift b/Lobe_iOS/ViewModels/PlayViewModel.swift index cb0b426..e1ddac5 100644 --- a/Lobe_iOS/ViewModels/PlayViewModel.swift +++ b/Lobe_iOS/ViewModels/PlayViewModel.swift @@ -32,6 +32,7 @@ class PlayViewModel: ObservableObject { // Subscribe to changes on image $image + .drop(while: { $0 == nil }) .receive(on: DispatchQueue.main) .sink(receiveValue: fetchPrediction(forImage:)) .store(in: &disposables) From 7fb824259696955c356f1222cef7b770cb75bb78 Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Sat, 12 Dec 2020 20:59:59 -1000 Subject: [PATCH 2/9] Image rotates properly with device orientation --- Lobe_iOS/Models/PredictionLayer.swift | 3 +- Lobe_iOS/ViewModels/PlayViewModel.swift | 27 ++++----- .../CaptureSessionViewController.swift | 58 +++++++++++++++++-- 3 files changed, 70 insertions(+), 18 deletions(-) diff --git a/Lobe_iOS/Models/PredictionLayer.swift b/Lobe_iOS/Models/PredictionLayer.swift index cc75f30..2b2531a 100644 --- a/Lobe_iOS/Models/PredictionLayer.swift +++ b/Lobe_iOS/Models/PredictionLayer.swift @@ -31,7 +31,8 @@ class PredictionLayer: NSObject, ImageClassificationPredicter { try? requestHandler.perform([request]) } - + + /// Creates request handler and formats image for prediciton processing. func createPredictionRequestHandler(forImage image: UIImage) -> VNImageRequestHandler { /* Crop to square images and send to the model. */ let _cgImage = image.squared()?.cgImage diff --git a/Lobe_iOS/ViewModels/PlayViewModel.swift b/Lobe_iOS/ViewModels/PlayViewModel.swift index e1ddac5..9d028d2 100644 --- a/Lobe_iOS/ViewModels/PlayViewModel.swift +++ b/Lobe_iOS/ViewModels/PlayViewModel.swift @@ -33,7 +33,6 @@ class PlayViewModel: ObservableObject { // Subscribe to changes on image $image .drop(while: { $0 == nil }) - .receive(on: DispatchQueue.main) .sink(receiveValue: fetchPrediction(forImage:)) .store(in: &disposables) } @@ -45,18 +44,20 @@ class PlayViewModel: ObservableObject { } self.imagePredicter .getPrdiction(forImage: image, onComplete: { [weak self] request in - guard let classifications = request.results as? [VNClassificationObservation] else { - self?.classificationLabel = "Classification Error" - return - } - - if classifications.isEmpty { - self?.classificationLabel = "No Labels Found" - } else { - /* Display top classifications ranked by confidence in the UI. */ - let topClassifications = classifications.prefix(1) - self?.classificationLabel = topClassifications[0].identifier - self?.confidence = topClassifications[0].confidence + DispatchQueue.main.async { [weak self] in + guard let classifications = request.results as? [VNClassificationObservation] else { + self?.classificationLabel = "Classification Error" + return + } + + if classifications.isEmpty { + self?.classificationLabel = "No Labels Found" + } else { + /* Display top classifications ranked by confidence in the UI. 
*/ + let topClassifications = classifications.prefix(1) + self?.classificationLabel = topClassifications[0].identifier + self?.confidence = topClassifications[0].confidence + } } }, onError: { [weak self] error in self?.classificationLabel = "Classification Error" diff --git a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift index 9a17011..1d76c5f 100644 --- a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift +++ b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift @@ -153,7 +153,6 @@ class CaptureSessionViewController: UIViewController { preview.frame = self.view.bounds connection.videoOrientation = videoOrientation } - preview.frame = self.view.bounds } @@ -169,16 +168,40 @@ extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDele guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let curImg = UIImage(pixelBuffer: pixelBuffer), - let previewLayer = self.previewLayer + let previewLayer = self.previewLayer, + let videoOrientation = previewLayer.connection?.videoOrientation else { print("Failed creating image at captureOutput.") return } + + var radiansToRotate = CGFloat(0) + switch videoOrientation { + case .portrait: + radiansToRotate = .pi / 2 + break + case .portraitUpsideDown: + radiansToRotate = (3 * .pi) / 2 + break + case .landscapeLeft: + if (self.captureDevice == self.backCam) { + radiansToRotate = .pi + } + break + case .landscapeRight: + if (self.captureDevice == self.frontCam) { + radiansToRotate = .pi + } + break + default: + break + } - let rotatedImage = curImg.rotate(radians: .pi / 2) + // rotatedimage is 1000% broken + let rotatedImage = curImg.rotate(radians: radiansToRotate) /* Crop the captured image to be the size of the screen. */ - guard let croppedImage = rotatedImage.crop(height: previewLayer.frame.height, width: previewLayer.frame.width) else { + guard let croppedImage = rotatedImage.crop(height: previewLayer.bounds.height, width: previewLayer.bounds.width) else { fatalError("Could not crop image.") } @@ -248,6 +271,33 @@ extension UIImage { } return self } + +// func rotated(byDegrees degrees: CGFloat) -> UIImage! { +// // calculate the size of the rotated view's containing box for our drawing space +// let rotatedViewBox = UIView(frame: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height)) +// let t = CGAffineTransform(rotationAngle: DegreesToRadians(degrees)) +// rotatedViewBox.transform = t +// let rotatedSize = rotatedViewBox.frame.size +// +// // Create the bitmap context +// UIGraphicsBeginImageContext(rotatedSize) +// let bitmap = UIGraphicsGetCurrentContext() +// +// // Move the origin to the middle of the image so we will rotate and scale around the center. +// bitmap?.translateBy(x: rotatedSize.width/2, y: rotatedSize.height/2) +// +// // // Rotate the image context +// bitmap?.rotate(by: DegreesToRadians(degrees)) +// +// // Now, draw the rotated/scaled image into the context +// bitmap?.scaleBy(x: 1.0, y: -1.0) +// bitmap?.draw(self.cgImage!, in: CGRect(x: -self.size.width / 2, y: -self.size.height / 2, width: self.size.width, height: self.size.height)) +// +// let newImage = UIGraphicsGetImageFromCurrentImageContext() +// UIGraphicsEndImageContext() +// return newImage +// +// } } /// Conversion helper for AVCaptureSession orientation changes. 
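
[Note on PATCH 2/9] The rotation handling added here maps the preview connection's AVCaptureVideoOrientation, together with the active camera, onto an angle in radians before the captured frame is rotated and cropped. For reference, the same mapping reads a little more clearly as a standalone pure function. The sketch below is only a restatement of the switch in this patch; rotationRadians(for:usingFrontCamera:) is a hypothetical helper name that does not appear in the series, and the front/back-camera check is passed in as a flag instead of being read off the view controller.

import AVFoundation
import UIKit

/// Pure restatement of the orientation switch above (sketch only, not part of the diff).
func rotationRadians(for orientation: AVCaptureVideoOrientation, usingFrontCamera: Bool) -> CGFloat {
    switch orientation {
    case .portrait:
        return .pi / 2
    case .portraitUpsideDown:
        return (3 * .pi) / 2
    case .landscapeLeft:
        // Back-camera buffers need a half turn in this orientation.
        return usingFrontCamera ? 0 : .pi
    case .landscapeRight:
        // Mirrored case: here the front camera needs the half turn.
        return usingFrontCamera ? .pi : 0
    @unknown default:
        return 0
    }
}

Keeping this mapping free of view-controller state would also make it straightforward to unit-test each orientation/camera combination in isolation.
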
From d1dd194137b9f5e6d63e6b280b1c36932a4244f1 Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Sat, 12 Dec 2020 22:02:51 -1000 Subject: [PATCH 3/9] Fixed image cropping issue --- .../CaptureSessionViewController.swift | 76 +++++++------------ 1 file changed, 26 insertions(+), 50 deletions(-) diff --git a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift index 1d76c5f..7a90730 100644 --- a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift +++ b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift @@ -197,12 +197,10 @@ extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDele break } - // rotatedimage is 1000% broken - let rotatedImage = curImg.rotate(radians: radiansToRotate) - - /* Crop the captured image to be the size of the screen. */ - guard let croppedImage = rotatedImage.crop(height: previewLayer.bounds.height, width: previewLayer.bounds.width) else { - fatalError("Could not crop image.") + /* Rotate and crop the captured image to be the size of the screen. */ + guard let rotatedImage = curImg.rotate(radians: radiansToRotate), + let croppedImage = rotatedImage.crop(height: previewLayer.bounds.height, width: previewLayer.bounds.width) else { + fatalError("Could not rotate or crop image.") } self.delegate?.setCameraImage(with: croppedImage) @@ -253,51 +251,29 @@ extension UIImage { self.init(cgImage: myImage) } - func rotate(radians: CGFloat) -> UIImage { - let rotatedSize = CGRect(origin: .zero, size: size) - .applying(CGAffineTransform(rotationAngle: CGFloat(radians))) - .integral.size - UIGraphicsBeginImageContext(rotatedSize) - if let context = UIGraphicsGetCurrentContext() { - let origin = CGPoint(x: rotatedSize.width / 2.0, - y: rotatedSize.height / 2.0) - context.translateBy(x: origin.x, y: origin.y) - context.rotate(by: radians) - draw(in: CGRect(x: -origin.y, y: -origin.x, - width: size.width, height: size.height)) - let rotatedImage = UIGraphicsGetImageFromCurrentImageContext() - UIGraphicsEndImageContext() - return rotatedImage ?? self - } - return self - } -// func rotated(byDegrees degrees: CGFloat) -> UIImage! { -// // calculate the size of the rotated view's containing box for our drawing space -// let rotatedViewBox = UIView(frame: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height)) -// let t = CGAffineTransform(rotationAngle: DegreesToRadians(degrees)) -// rotatedViewBox.transform = t -// let rotatedSize = rotatedViewBox.frame.size -// -// // Create the bitmap context -// UIGraphicsBeginImageContext(rotatedSize) -// let bitmap = UIGraphicsGetCurrentContext() -// -// // Move the origin to the middle of the image so we will rotate and scale around the center. -// bitmap?.translateBy(x: rotatedSize.width/2, y: rotatedSize.height/2) -// -// // // Rotate the image context -// bitmap?.rotate(by: DegreesToRadians(degrees)) -// -// // Now, draw the rotated/scaled image into the context -// bitmap?.scaleBy(x: 1.0, y: -1.0) -// bitmap?.draw(self.cgImage!, in: CGRect(x: -self.size.width / 2, y: -self.size.height / 2, width: self.size.width, height: self.size.height)) -// -// let newImage = UIGraphicsGetImageFromCurrentImageContext() -// UIGraphicsEndImageContext() -// return newImage -// -// } + func rotate(radians: CGFloat) -> UIImage? 
{ + var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size + // Trim off the extremely small float value to prevent core graphics from rounding it up + newSize.width = floor(newSize.width) + newSize.height = floor(newSize.height) + + UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale) + let context = UIGraphicsGetCurrentContext()! + + // Move origin to middle + context.translateBy(x: newSize.width/2, y: newSize.height/2) + // Rotate around middle + context.rotate(by: CGFloat(radians)) + + // Draw the image at its center + self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height)) + + let newImage = UIGraphicsGetImageFromCurrentImageContext() + UIGraphicsEndImageContext() + + return newImage + } } /// Conversion helper for AVCaptureSession orientation changes. From b54da2cea4be8ea7f5b7b19bc0092a65f9f0c8d0 Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Sun, 13 Dec 2020 18:39:26 -1000 Subject: [PATCH 4/9] Flipped horizontal axis for front-facing camera --- .../PlayView/CaptureSessionViewController.swift | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift index 7a90730..f715b42 100644 --- a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift +++ b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift @@ -167,7 +167,7 @@ extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDele if totalFrameCount % 20 != 0{ return } guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), - let curImg = UIImage(pixelBuffer: pixelBuffer), + let image = UIImage(pixelBuffer: pixelBuffer), let previewLayer = self.previewLayer, let videoOrientation = previewLayer.connection?.videoOrientation else { @@ -198,7 +198,8 @@ extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDele } /* Rotate and crop the captured image to be the size of the screen. */ - guard let rotatedImage = curImg.rotate(radians: radiansToRotate), + let isUsingFrontCam = self.captureDevice == self.frontCam + guard let rotatedImage = image.rotate(radians: radiansToRotate, flipX: isUsingFrontCam), let croppedImage = rotatedImage.crop(height: previewLayer.bounds.height, width: previewLayer.bounds.width) else { fatalError("Could not rotate or crop image.") } @@ -252,7 +253,7 @@ extension UIImage { self.init(cgImage: myImage) } - func rotate(radians: CGFloat) -> UIImage? { + func rotate(radians: CGFloat, flipX: Bool = false) -> UIImage? 
{ var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size // Trim off the extremely small float value to prevent core graphics from rounding it up newSize.width = floor(newSize.width) @@ -263,9 +264,13 @@ extension UIImage { // Move origin to middle context.translateBy(x: newSize.width/2, y: newSize.height/2) + + // Flip x-axis if specified (used to correct front-facing cam + if flipX { context.scaleBy(x: -1, y: 1) } + // Rotate around middle context.rotate(by: CGFloat(radians)) - + // Draw the image at its center self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height)) From cdf1cf12d631f67d3f771651ee2ea6932a054ba9 Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Sun, 13 Dec 2020 19:23:47 -1000 Subject: [PATCH 5/9] Comments --- Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift index f715b42..04318cf 100644 --- a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift +++ b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift @@ -162,7 +162,7 @@ class CaptureSessionViewController: UIViewController { /// Defines delegate method. extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDelegate { func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - /* Skip frames to optimize. */ + // Skip frames to optimize. totalFrameCount += 1 if totalFrameCount % 20 != 0{ return } @@ -175,6 +175,7 @@ extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDele return } + // Determine rotation by radians given device orientation and camera device var radiansToRotate = CGFloat(0) switch videoOrientation { case .portrait: @@ -197,7 +198,7 @@ extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDele break } - /* Rotate and crop the captured image to be the size of the screen. */ + // Rotate and crop the captured image to be the size of the screen. 
let isUsingFrontCam = self.captureDevice == self.frontCam guard let rotatedImage = image.rotate(radians: radiansToRotate, flipX: isUsingFrontCam), let croppedImage = rotatedImage.crop(height: previewLayer.bounds.height, width: previewLayer.bounds.width) else { From 1c9e01c672cda9dcc44633203a6264d8d060c83b Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Sun, 13 Dec 2020 19:49:38 -1000 Subject: [PATCH 6/9] Moved image classification task to background thread --- Lobe_iOS/ViewModels/PlayViewModel.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/Lobe_iOS/ViewModels/PlayViewModel.swift b/Lobe_iOS/ViewModels/PlayViewModel.swift index 9d028d2..f12b257 100644 --- a/Lobe_iOS/ViewModels/PlayViewModel.swift +++ b/Lobe_iOS/ViewModels/PlayViewModel.swift @@ -33,6 +33,7 @@ class PlayViewModel: ObservableObject { // Subscribe to changes on image $image .drop(while: { $0 == nil }) + .receive(on: DispatchQueue.global(qos: .userInitiated)) .sink(receiveValue: fetchPrediction(forImage:)) .store(in: &disposables) } From d0f0a95c80ea7e3d0bd8d61c24b1c8dd045a1312 Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Mon, 14 Dec 2020 18:02:14 -1000 Subject: [PATCH 7/9] Fixed cropping by using image.squared helper --- Lobe_iOS/Models/PredictionLayer.swift | 3 +-- .../CaptureSessionViewController.swift | 19 ++----------------- Lobe_iOS/Views/PlayView/PlayView.swift | 7 ++++++- 3 files changed, 9 insertions(+), 20 deletions(-) diff --git a/Lobe_iOS/Models/PredictionLayer.swift b/Lobe_iOS/Models/PredictionLayer.swift index 2b2531a..8a70de5 100644 --- a/Lobe_iOS/Models/PredictionLayer.swift +++ b/Lobe_iOS/Models/PredictionLayer.swift @@ -35,8 +35,7 @@ class PredictionLayer: NSObject, ImageClassificationPredicter { /// Creates request handler and formats image for prediciton processing. func createPredictionRequestHandler(forImage image: UIImage) -> VNImageRequestHandler { /* Crop to square images and send to the model. */ - let _cgImage = image.squared()?.cgImage - guard let cgImage = _cgImage else { + guard let cgImage = image.cgImage else { fatalError("Could not create cgImage in captureOutput") } diff --git a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift index 04318cf..13ca467 100644 --- a/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift +++ b/Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift @@ -201,11 +201,11 @@ extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDele // Rotate and crop the captured image to be the size of the screen. let isUsingFrontCam = self.captureDevice == self.frontCam guard let rotatedImage = image.rotate(radians: radiansToRotate, flipX: isUsingFrontCam), - let croppedImage = rotatedImage.crop(height: previewLayer.bounds.height, width: previewLayer.bounds.width) else { + let squaredImage = rotatedImage.squared() else { fatalError("Could not rotate or crop image.") } - self.delegate?.setCameraImage(with: croppedImage) + self.delegate?.setCameraImage(with: squaredImage) } } @@ -228,21 +228,6 @@ extension UIImage { .draw(in: .init(origin: .zero, size: breadthSize)) } } - func crop(isOpaque: Bool = false, height: CGFloat, width: CGFloat) -> UIImage? { - let newWidth = size.width - let newHeight = height / width * size.width - var screenSize: CGSize { .init(width: newWidth, height: newHeight)} - guard let cgImage = cgImage? 
- .cropping(to: .init(origin: .init(x: 0, - y: ((size.height - newHeight) / 2)), - size: screenSize)) else { return nil } - let format = imageRendererFormat - format.opaque = isOpaque - return UIGraphicsImageRenderer(size: screenSize, format: format).image { _ in - UIImage(cgImage: cgImage, scale: 1, orientation: imageOrientation) - .draw(in: .init(origin: .zero, size: screenSize)) - } - } public convenience init?(pixelBuffer: CVPixelBuffer) { var cgImage: CGImage? VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage) diff --git a/Lobe_iOS/Views/PlayView/PlayView.swift b/Lobe_iOS/Views/PlayView/PlayView.swift index 19fe64e..f08239b 100644 --- a/Lobe_iOS/Views/PlayView/PlayView.swift +++ b/Lobe_iOS/Views/PlayView/PlayView.swift @@ -44,8 +44,13 @@ struct PlayView: View { .frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity) .background(Color.black) .edgesIgnoringSafeArea(.all) - + VStack { + if let image = self.viewModel.image { + Image(uiImage: image) + .resizable() + .aspectRatio(image.size, contentMode: .fit) + } Spacer() PredictionLabelView(classificationLabel: self.$viewModel.classificationLabel, confidence: self.$viewModel.confidence, projectName: self.viewModel.project.name) } From 059713a77ff0675a3958f2ef6481c470a57127d3 Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Mon, 14 Dec 2020 18:31:56 -1000 Subject: [PATCH 8/9] Fixed screenshot to only save to storage the image for processing --- Lobe_iOS/Views/PlayView/CameraView.swift | 41 ++++-------------------- 1 file changed, 6 insertions(+), 35 deletions(-) diff --git a/Lobe_iOS/Views/PlayView/CameraView.swift b/Lobe_iOS/Views/PlayView/CameraView.swift index aec30bd..46efca5 100644 --- a/Lobe_iOS/Views/PlayView/CameraView.swift +++ b/Lobe_iOS/Views/PlayView/CameraView.swift @@ -41,14 +41,18 @@ struct CameraView: UIViewControllerRepresentable { init(_ parent: CameraView) { self.parent = parent } - /// Wrapper for screen shot.. + /// Wrapper for screen shot, which saves to storage the image which gets used for inference. func takeScreenShot(inView view: UIView) { guard let camImage = self.parent.viewModel.image else { fatalError("Could not call takeScreenShot") } + + /// Create a `UIImageView` for overlaying the shutter animation over the camera view. + /// Remove it from the super view after image is saved to storage. let imageView = UIImageView(image: camImage) screenShotAnimate(inView: view, imageView: imageView) - screenShotSaveToLibrary(imageView: imageView) + UIImageWriteToSavedPhotosAlbum(camImage, nil, nil, nil) + imageView.removeFromSuperview() } /// Provides flash animation when screenshot is triggered. @@ -67,39 +71,6 @@ struct CameraView: UIViewControllerRepresentable { UIView.animate(withDuration: 0.3, delay: 0, options: UIView.AnimationOptions.curveLinear, animations: { blackView.alpha = 0 }, completion: nil) - - if self.parent.viewModel.viewMode == .Camera { - UIView.transition(with: view, duration: 1, options: .curveEaseIn, animations: nil) - view.addSubview(imageView) - self.parent.viewModel.viewMode = .ImagePreview - } - } - - /// Saves screen shot photo to library. 
- private func screenShotSaveToLibrary(imageView: UIImageView) { - guard let layer = UIApplication.shared.windows.first(where: \.isKeyWindow)?.layer else { - fatalError("Could not get layer for keyWindow") - } - - // Must be called before screenshot context is gathered - let scale = UIScreen.main.scale - UIGraphicsBeginImageContextWithOptions(layer.frame.size, false, scale) - - // Get screenshot data - guard let uiGraphicsCtx = UIGraphicsGetCurrentContext() else { - fatalError("Could not get screenshot context") - } - layer.render(in: uiGraphicsCtx) - let screenshot = UIGraphicsGetImageFromCurrentImageContext() - - UIGraphicsEndImageContext() - guard let camImage = self.parent.viewModel.image, screenshot != nil else { - fatalError("Unable to save screenshot") - } - UIImageWriteToSavedPhotosAlbum(screenshot!, nil, nil, nil) - imageView.removeFromSuperview() - self.parent.viewModel.viewMode = .Camera - self.parent.viewModel.image = camImage } /// Sets view model image. From 07c4d4679b6f5ab6adc0ca9c00c5a83ea21f0af3 Mon Sep 17 00:00:00 2001 From: Elliot Boschwitz Date: Mon, 14 Dec 2020 18:35:11 -1000 Subject: [PATCH 9/9] Remove the debugger view --- Lobe_iOS/Views/PlayView/PlayView.swift | 5 ----- 1 file changed, 5 deletions(-) diff --git a/Lobe_iOS/Views/PlayView/PlayView.swift b/Lobe_iOS/Views/PlayView/PlayView.swift index f08239b..bb38cb4 100644 --- a/Lobe_iOS/Views/PlayView/PlayView.swift +++ b/Lobe_iOS/Views/PlayView/PlayView.swift @@ -46,11 +46,6 @@ struct PlayView: View { .edgesIgnoringSafeArea(.all) VStack { - if let image = self.viewModel.image { - Image(uiImage: image) - .resizable() - .aspectRatio(image.size, contentMode: .fit) - } Spacer() PredictionLabelView(classificationLabel: self.$viewModel.classificationLabel, confidence: self.$viewModel.confidence, projectName: self.viewModel.project.name) }
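
[Note on PATCH 8/9] The screenshot path now hands the inference image straight to UIImageWriteToSavedPhotosAlbum with nil for the completion target and selector, so a failed save (for example, a denied or missing photo-library add permission) goes unreported. If error reporting is wanted as a follow-up, UIKit's documented completion-selector pattern looks roughly like the sketch below; PhotoSaver is a hypothetical helper and is not part of this series.

import UIKit

/// Sketch: saving with a completion selector so save errors surface (assumed helper, not in the diff).
class PhotoSaver: NSObject {
    func save(_ photo: UIImage) {
        UIImageWriteToSavedPhotosAlbum(photo,
                                       self,
                                       #selector(image(_:didFinishSavingWithError:contextInfo:)),
                                       nil)
    }

    @objc func image(_ image: UIImage, didFinishSavingWithError error: Error?, contextInfo: UnsafeRawPointer) {
        if let error = error {
            // Typical causes: NSPhotoLibraryAddUsageDescription missing from Info.plist, or access denied.
            print("Saving screenshot failed: \(error.localizedDescription)")
        }
    }
}

Because the callback is delivered to an Objective-C selector, the receiving object must be an NSObject subclass kept alive for the duration of the save.
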