Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixed image processing bugs #20

Merged
merged 9 commits into from
Dec 29, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions Lobe_iOS/Models/PredictionLayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,11 @@ class PredictionLayer: NSObject, ImageClassificationPredicter {

try? requestHandler.perform([request])
}


/// Creates request handler and formats image for prediction processing.
func createPredictionRequestHandler(forImage image: UIImage) -> VNImageRequestHandler {
/* Crop to square images and send to the model. */
let _cgImage = image.squared()?.cgImage
guard let cgImage = _cgImage else {
guard let cgImage = image.cgImage else {
fatalError("Could not create cgImage in captureOutput")
}

Expand Down
29 changes: 16 additions & 13 deletions Lobe_iOS/ViewModels/PlayViewModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@ class PlayViewModel: ObservableObject {

// Subscribe to changes on image
$image
.receive(on: DispatchQueue.main)
.drop(while: { $0 == nil })
.receive(on: DispatchQueue.global(qos: .userInitiated))
.sink(receiveValue: fetchPrediction(forImage:))
.store(in: &disposables)
}
Expand All @@ -44,18 +45,20 @@ class PlayViewModel: ObservableObject {
}
self.imagePredicter
.getPrdiction(forImage: image, onComplete: { [weak self] request in
guard let classifications = request.results as? [VNClassificationObservation] else {
self?.classificationLabel = "Classification Error"
return
}

if classifications.isEmpty {
self?.classificationLabel = "No Labels Found"
} else {
/* Display top classifications ranked by confidence in the UI. */
let topClassifications = classifications.prefix(1)
self?.classificationLabel = topClassifications[0].identifier
self?.confidence = topClassifications[0].confidence
DispatchQueue.main.async { [weak self] in
guard let classifications = request.results as? [VNClassificationObservation] else {
self?.classificationLabel = "Classification Error"
return
}

if classifications.isEmpty {
self?.classificationLabel = "No Labels Found"
} else {
/* Display top classifications ranked by confidence in the UI. */
let topClassifications = classifications.prefix(1)
self?.classificationLabel = topClassifications[0].identifier
self?.confidence = topClassifications[0].confidence
}
}
}, onError: { [weak self] error in
self?.classificationLabel = "Classification Error"
Expand Down
41 changes: 6 additions & 35 deletions Lobe_iOS/Views/PlayView/CameraView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -41,14 +41,18 @@ struct CameraView: UIViewControllerRepresentable {
/// Stores a reference to the parent `CameraView` so delegate callbacks
/// can reach its view model.
init(_ parent: CameraView) {
self.parent = parent
}
/// Wrapper for screen shot, which saves to storage the image used for inference.
/// Crashes (`fatalError`) if no camera image is currently held by the view model.
func takeScreenShot(inView view: UIView) {
guard let camImage = self.parent.viewModel.image else {
fatalError("Could not call takeScreenShot")
}

// Create a `UIImageView` for overlaying the shutter animation over the
// camera view; it is removed from the superview once the save completes.
let imageView = UIImageView(image: camImage)
screenShotAnimate(inView: view, imageView: imageView)
// NOTE(review): diff residue — both the old save path (`screenShotSaveToLibrary`)
// and the new direct save (`UIImageWriteToSavedPhotosAlbum`) appear here. A
// correct merge keeps only one of them; as written the photo would be saved twice.
screenShotSaveToLibrary(imageView: imageView)
UIImageWriteToSavedPhotosAlbum(camImage, nil, nil, nil)
imageView.removeFromSuperview()
}

/// Provides flash animation when screenshot is triggered.
Expand All @@ -67,39 +71,6 @@ struct CameraView: UIViewControllerRepresentable {
UIView.animate(withDuration: 0.3, delay: 0, options: UIView.AnimationOptions.curveLinear, animations: {
blackView.alpha = 0
}, completion: nil)

if self.parent.viewModel.viewMode == .Camera {
UIView.transition(with: view, duration: 1, options: .curveEaseIn, animations: nil)
view.addSubview(imageView)
self.parent.viewModel.viewMode = .ImagePreview
}
}

/// Renders the key window's layer into a bitmap, saves the result to the
/// photo library, and restores the camera-preview state on the view model.
///
/// - Parameter imageView: The overlay image view added for the shutter
///   animation; removed from its superview once the save completes.
/// - Note: Crashes via `fatalError` if the key window or a graphics context
///   cannot be obtained, or if no camera image is held by the view model.
private func screenShotSaveToLibrary(imageView: UIImageView) {
    guard let layer = UIApplication.shared.windows.first(where: \.isKeyWindow)?.layer else {
        fatalError("Could not get layer for keyWindow")
    }

    // The screen scale must be read before the screenshot context is created.
    let scale = UIScreen.main.scale
    UIGraphicsBeginImageContextWithOptions(layer.frame.size, false, scale)

    // Render the window layer into the bitmap context and capture the result.
    guard let uiGraphicsCtx = UIGraphicsGetCurrentContext() else {
        fatalError("Could not get screenshot context")
    }
    layer.render(in: uiGraphicsCtx)
    let capturedScreenshot = UIGraphicsGetImageFromCurrentImageContext()

    UIGraphicsEndImageContext()
    // Fix: bind the screenshot with `guard let` instead of the original
    // separate nil-check followed by a force unwrap (`screenshot!`).
    guard let screenshot = capturedScreenshot,
          let camImage = self.parent.viewModel.image else {
        fatalError("Unable to save screenshot")
    }
    UIImageWriteToSavedPhotosAlbum(screenshot, nil, nil, nil)
    imageView.removeFromSuperview()
    self.parent.viewModel.viewMode = .Camera
    self.parent.viewModel.image = camImage
}

/// Sets view model image.
Expand Down
101 changes: 59 additions & 42 deletions Lobe_iOS/Views/PlayView/CaptureSessionViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,6 @@ class CaptureSessionViewController: UIViewController {
preview.frame = self.view.bounds
connection.videoOrientation = videoOrientation
}

preview.frame = self.view.bounds

}
Expand All @@ -163,26 +162,50 @@ class CaptureSessionViewController: UIViewController {
/// Defines delegate method.
extension CaptureSessionViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
/// AVCapture delegate callback: converts every 20th sampled frame into a
/// `UIImage`, orients/crops it, and forwards it to the delegate for prediction.
///
/// NOTE(review): this block is diff residue — old and new versions of several
/// statements are interleaved (duplicate guard bindings, two rotate/crop paths,
/// two delegate calls) and it is not valid Swift as-is. See inline notes.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
/* Skip frames to optimize. */
// Skip frames to optimize.
totalFrameCount += 1
if totalFrameCount % 20 != 0{ return }

// NOTE(review): old binding `curImg` and new binding `image` both appear, and
// the first `previewLayer` line is missing its trailing comma — merge artifact.
guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
let curImg = UIImage(pixelBuffer: pixelBuffer),
let previewLayer = self.previewLayer
let image = UIImage(pixelBuffer: pixelBuffer),
let previewLayer = self.previewLayer,
let videoOrientation = previewLayer.connection?.videoOrientation
else {
print("Failed creating image at captureOutput.")
return
}

// Determine rotation by radians given device orientation and camera device
var radiansToRotate = CGFloat(0)
switch videoOrientation {
case .portrait:
radiansToRotate = .pi / 2
break
case .portraitUpsideDown:
radiansToRotate = (3 * .pi) / 2
break
case .landscapeLeft:
// Back camera needs a half-turn in this orientation.
if (self.captureDevice == self.backCam) {
radiansToRotate = .pi
}
break
case .landscapeRight:
// Front camera needs a half-turn in this orientation.
if (self.captureDevice == self.frontCam) {
radiansToRotate = .pi
}
break
default:
break
}

// NOTE(review): old fixed 90-degree rotate + screen-size crop path below.
let rotatedImage = curImg.rotate(radians: .pi / 2)

/* Crop the captured image to be the size of the screen. */
guard let croppedImage = rotatedImage.crop(height: previewLayer.frame.height, width: previewLayer.frame.width) else {
fatalError("Could not crop image.")
// NOTE(review): new orientation-aware rotate + square-crop path below; only
// one of the two paths (and one delegate call) should survive a correct merge.
// Rotate and crop the captured image to be the size of the screen.
let isUsingFrontCam = self.captureDevice == self.frontCam
guard let rotatedImage = image.rotate(radians: radiansToRotate, flipX: isUsingFrontCam),
let squaredImage = rotatedImage.squared() else {
fatalError("Could not rotate or crop image.")
}

self.delegate?.setCameraImage(with: croppedImage)
self.delegate?.setCameraImage(with: squaredImage)
}
}

Expand All @@ -205,21 +228,6 @@ extension UIImage {
.draw(in: .init(origin: .zero, size: breadthSize))
}
}
/// Crops the image to a centered horizontal band whose aspect ratio matches
/// `height`/`width`, keeping the full image width.
/// - Parameters:
///   - isOpaque: Whether the rendered result should be opaque (default `false`).
///   - height: Target height used only for the height/width aspect ratio.
///   - width: Target width used only for the height/width aspect ratio.
/// - Returns: The cropped image, or `nil` if the backing `cgImage` is unavailable.
func crop(isOpaque: Bool = false, height: CGFloat, width: CGFloat) -> UIImage? {
// Keep the full image width; derive the crop height from the target aspect ratio.
let newWidth = size.width
let newHeight = height / width * size.width
var screenSize: CGSize { .init(width: newWidth, height: newHeight)}
// Crop a vertically centered rect out of the backing CGImage.
// NOTE(review): `cropping(to:)` operates in pixel coordinates while `size` is
// in points — presumably mis-crops on 2x/3x screens; verify against callers.
guard let cgImage = cgImage?
.cropping(to: .init(origin: .init(x: 0,
y: ((size.height - newHeight) / 2)),
size: screenSize)) else { return nil }
let format = imageRendererFormat
format.opaque = isOpaque
// Re-render the cropped CGImage, carrying over the original imageOrientation.
return UIGraphicsImageRenderer(size: screenSize, format: format).image { _ in
UIImage(cgImage: cgImage, scale: 1, orientation: imageOrientation)
.draw(in: .init(origin: .zero, size: screenSize))
}
}
public convenience init?(pixelBuffer: CVPixelBuffer) {
var cgImage: CGImage?
VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
Expand All @@ -230,23 +238,32 @@ extension UIImage {

self.init(cgImage: myImage)
}
func rotate(radians: CGFloat) -> UIImage {
let rotatedSize = CGRect(origin: .zero, size: size)
.applying(CGAffineTransform(rotationAngle: CGFloat(radians)))
.integral.size
UIGraphicsBeginImageContext(rotatedSize)
if let context = UIGraphicsGetCurrentContext() {
let origin = CGPoint(x: rotatedSize.width / 2.0,
y: rotatedSize.height / 2.0)
context.translateBy(x: origin.x, y: origin.y)
context.rotate(by: radians)
draw(in: CGRect(x: -origin.y, y: -origin.x,
width: size.width, height: size.height))
let rotatedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return rotatedImage ?? self
}
return self

/// Returns a copy of the image rotated by `radians` about its center.
///
/// - Parameters:
///   - radians: Rotation angle in radians.
///   - flipX: Mirrors the result across the x-axis before rotating; used to
///     correct the mirrored output of the front-facing camera.
/// - Returns: The rotated (and optionally mirrored) image, or `nil` if a
///   graphics context could not be obtained.
func rotate(radians: CGFloat, flipX: Bool = false) -> UIImage? {
    // Bounding-box size of the rotated image.
    var newSize = CGRect(origin: CGPoint.zero, size: self.size)
        .applying(CGAffineTransform(rotationAngle: radians))
        .size
    // Trim off the extremely small float value to prevent Core Graphics from rounding it up.
    newSize.width = floor(newSize.width)
    newSize.height = floor(newSize.height)

    UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
    // Ensure the context is torn down on every exit path.
    defer { UIGraphicsEndImageContext() }
    // Fix: guard instead of force-unwrapping — the return type is already Optional,
    // so a missing context should yield `nil`, not a crash.
    guard let context = UIGraphicsGetCurrentContext() else { return nil }

    // Move the origin to the middle so the rotation happens about the center.
    context.translateBy(x: newSize.width / 2, y: newSize.height / 2)

    // Flip the x-axis if specified (used to correct the front-facing camera).
    if flipX { context.scaleBy(x: -1, y: 1) }

    // Rotate around the middle.
    context.rotate(by: radians)

    // Draw the image centered on the translated origin.
    self.draw(in: CGRect(x: -self.size.width / 2, y: -self.size.height / 2,
                         width: self.size.width, height: self.size.height))

    return UIGraphicsGetImageFromCurrentImageContext()
}
}

Expand Down
2 changes: 1 addition & 1 deletion Lobe_iOS/Views/PlayView/PlayView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ struct PlayView: View {
.frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity)
.background(Color.black)
.edgesIgnoringSafeArea(.all)

VStack {
Spacer()
PredictionLabelView(classificationLabel: self.$viewModel.classificationLabel, confidence: self.$viewModel.confidence, projectName: self.viewModel.project.name)
Expand Down