diff --git a/Lumina.podspec b/Lumina.podspec index 0a888b4..8c335f1 100644 --- a/Lumina.podspec +++ b/Lumina.podspec @@ -1,6 +1,6 @@ Pod::Spec.new do |s| s.name = "Lumina" - s.version = "0.11.2" + s.version = "0.12.0" s.summary = "Lumina gives you a camera for most photo processing needs, including streaming frames for CoreML live detection." s.homepage = "https://github.com/dokun1/Lumina" s.license = { :type => "MIT" } diff --git a/Lumina/.swiftlint.yml b/Lumina/.swiftlint.yml index f003326..a13582d 100644 --- a/Lumina/.swiftlint.yml +++ b/Lumina/.swiftlint.yml @@ -1,2 +1,5 @@ disabled_rules: - line_length +file_length: + warning: 600 + error: 1000 diff --git a/Lumina/Lumina.xcodeproj/project.pbxproj b/Lumina/Lumina.xcodeproj/project.pbxproj index 787fb53..f51420f 100644 --- a/Lumina/Lumina.xcodeproj/project.pbxproj +++ b/Lumina/Lumina.xcodeproj/project.pbxproj @@ -9,6 +9,26 @@ /* Begin PBXBuildFile section */ 5332084E1F9A99CA008354F7 /* LuminaDeviceUtil.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5332084D1F9A99CA008354F7 /* LuminaDeviceUtil.swift */; }; 5365BFD61F79941900B8F338 /* LuminaObjectRecognizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365BFD51F79941900B8F338 /* LuminaObjectRecognizer.swift */; }; + 5373B3C41FC386C600C197BD /* FileRecordingExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3C31FC386C600C197BD /* FileRecordingExtension.swift */; }; + 5373B3C61FC387EE00C197BD /* DepthDataExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3C51FC387EE00C197BD /* DepthDataExtension.swift */; }; + 5373B3C81FC3882900C197BD /* SampleBufferExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3C71FC3882900C197BD /* SampleBufferExtension.swift */; }; + 5373B3CB1FC3AFF900C197BD /* VideoDataOutputSampleBufferDelegateExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3CA1FC3AFF900C197BD /* VideoDataOutputSampleBufferDelegateExtension.swift */; }; + 5373B3CD1FC3B07A00C197BD 
/* PhotoCaptureDelegateExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3CC1FC3B07A00C197BD /* PhotoCaptureDelegateExtension.swift */; }; + 5373B3CF1FC3B0DA00C197BD /* CapturePhotoExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3CE1FC3B0DA00C197BD /* CapturePhotoExtension.swift */; }; + 5373B3D11FC3B14D00C197BD /* MetadataOutputDelegateExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3D01FC3B14D00C197BD /* MetadataOutputDelegateExtension.swift */; }; + 5373B3D31FC3B19700C197BD /* FocusHandlerExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3D21FC3B19700C197BD /* FocusHandlerExtension.swift */; }; + 5373B3D51FC3B20200C197BD /* CaptureDeviceHandlerExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3D41FC3B20200C197BD /* CaptureDeviceHandlerExtension.swift */; }; + 5373B3D71FC3B3F000C197BD /* CameraUtilExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3D61FC3B3F000C197BD /* CameraUtilExtension.swift */; }; + 5373B3D91FC3B47E00C197BD /* SessionConfigurationExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3D81FC3B47E00C197BD /* SessionConfigurationExtension.swift */; }; + 5373B3DB1FC3B4A300C197BD /* CameraActionsExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3DA1FC3B4A300C197BD /* CameraActionsExtension.swift */; }; + 5373B3DF1FC3B8B600C197BD /* LuminaPhotoCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3DE1FC3B8B600C197BD /* LuminaPhotoCapture.swift */; }; + 5373B3E21FC3D47500C197BD /* ViewControllerFocusHandlerExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3E11FC3D47500C197BD /* ViewControllerFocusHandlerExtension.swift */; }; + 5373B3E41FC3D4DB00C197BD /* ViewControllerButtonFunctions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3E31FC3D4DB00C197BD /* ViewControllerButtonFunctions.swift */; }; + 5373B3E71FC3D50300C197BD /* GestureRecognizerDelegateExtension.swift in 
Sources */ = {isa = PBXBuildFile; fileRef = 5373B3E61FC3D50300C197BD /* GestureRecognizerDelegateExtension.swift */; }; + 5373B3E91FC3D5AA00C197BD /* LuminaCameraDelegateExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3E81FC3D5AA00C197BD /* LuminaCameraDelegateExtension.swift */; }; + 5373B3EB1FC3D5D700C197BD /* InterfaceHandlerExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3EA1FC3D5D700C197BD /* InterfaceHandlerExtension.swift */; }; + 5373B3ED1FC3D65500C197BD /* LuminaCameraViewSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3EC1FC3D65500C197BD /* LuminaCameraViewSettings.swift */; }; + 5373B3EF1FC3D6CD00C197BD /* LuminaDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5373B3EE1FC3D6CD00C197BD /* LuminaDelegate.swift */; }; 53806FA41F6618230009A0E8 /* LuminaCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 53806FA31F6618230009A0E8 /* LuminaCamera.swift */; }; 53806FA61F672F780009A0E8 /* LuminaButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 53806FA51F672F780009A0E8 /* LuminaButton.swift */; }; 539848301EB8F14300F77AEA /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 5398482F1EB8F14300F77AEA /* AVFoundation.framework */; }; @@ -34,6 +54,26 @@ 509FF65F1F9D2F6300FD62CD /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 5332084D1F9A99CA008354F7 /* LuminaDeviceUtil.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LuminaDeviceUtil.swift; sourceTree = ""; }; 5365BFD51F79941900B8F338 /* LuminaObjectRecognizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LuminaObjectRecognizer.swift; sourceTree = ""; }; + 5373B3C31FC386C600C197BD /* FileRecordingExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FileRecordingExtension.swift; sourceTree = ""; }; + 
5373B3C51FC387EE00C197BD /* DepthDataExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthDataExtension.swift; sourceTree = ""; }; + 5373B3C71FC3882900C197BD /* SampleBufferExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SampleBufferExtension.swift; sourceTree = ""; }; + 5373B3CA1FC3AFF900C197BD /* VideoDataOutputSampleBufferDelegateExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoDataOutputSampleBufferDelegateExtension.swift; sourceTree = ""; }; + 5373B3CC1FC3B07A00C197BD /* PhotoCaptureDelegateExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegateExtension.swift; sourceTree = ""; }; + 5373B3CE1FC3B0DA00C197BD /* CapturePhotoExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CapturePhotoExtension.swift; sourceTree = ""; }; + 5373B3D01FC3B14D00C197BD /* MetadataOutputDelegateExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetadataOutputDelegateExtension.swift; sourceTree = ""; }; + 5373B3D21FC3B19700C197BD /* FocusHandlerExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FocusHandlerExtension.swift; sourceTree = ""; }; + 5373B3D41FC3B20200C197BD /* CaptureDeviceHandlerExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureDeviceHandlerExtension.swift; sourceTree = ""; }; + 5373B3D61FC3B3F000C197BD /* CameraUtilExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraUtilExtension.swift; sourceTree = ""; }; + 5373B3D81FC3B47E00C197BD /* SessionConfigurationExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SessionConfigurationExtension.swift; sourceTree = ""; }; + 5373B3DA1FC3B4A300C197BD /* CameraActionsExtension.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraActionsExtension.swift; sourceTree = ""; }; + 5373B3DE1FC3B8B600C197BD /* LuminaPhotoCapture.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LuminaPhotoCapture.swift; sourceTree = ""; }; + 5373B3E11FC3D47500C197BD /* ViewControllerFocusHandlerExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewControllerFocusHandlerExtension.swift; sourceTree = ""; }; + 5373B3E31FC3D4DB00C197BD /* ViewControllerButtonFunctions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewControllerButtonFunctions.swift; sourceTree = ""; }; + 5373B3E61FC3D50300C197BD /* GestureRecognizerDelegateExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GestureRecognizerDelegateExtension.swift; sourceTree = ""; }; + 5373B3E81FC3D5AA00C197BD /* LuminaCameraDelegateExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LuminaCameraDelegateExtension.swift; sourceTree = ""; }; + 5373B3EA1FC3D5D700C197BD /* InterfaceHandlerExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InterfaceHandlerExtension.swift; sourceTree = ""; }; + 5373B3EC1FC3D65500C197BD /* LuminaCameraViewSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LuminaCameraViewSettings.swift; sourceTree = ""; }; + 5373B3EE1FC3D6CD00C197BD /* LuminaDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LuminaDelegate.swift; sourceTree = ""; }; 53806FA31F6618230009A0E8 /* LuminaCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LuminaCamera.swift; sourceTree = ""; }; 53806FA51F672F780009A0E8 /* LuminaButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LuminaButton.swift; sourceTree = ""; }; 5398482F1EB8F14300F77AEA 
/* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; @@ -68,6 +108,88 @@ /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 5373B3C11FC3868200C197BD /* Camera */ = { + isa = PBXGroup; + children = ( + 5332084D1F9A99CA008354F7 /* LuminaDeviceUtil.swift */, + 53806FA31F6618230009A0E8 /* LuminaCamera.swift */, + 5373B3DE1FC3B8B600C197BD /* LuminaPhotoCapture.swift */, + 5365BFD51F79941900B8F338 /* LuminaObjectRecognizer.swift */, + 5373B3C21FC3868B00C197BD /* Extensions */, + ); + path = Camera; + sourceTree = ""; + }; + 5373B3C21FC3868B00C197BD /* Extensions */ = { + isa = PBXGroup; + children = ( + 5373B3DA1FC3B4A300C197BD /* CameraActionsExtension.swift */, + 5373B3D81FC3B47E00C197BD /* SessionConfigurationExtension.swift */, + 5373B3C71FC3882900C197BD /* SampleBufferExtension.swift */, + 5373B3CE1FC3B0DA00C197BD /* CapturePhotoExtension.swift */, + 5373B3D21FC3B19700C197BD /* FocusHandlerExtension.swift */, + 5373B3D41FC3B20200C197BD /* CaptureDeviceHandlerExtension.swift */, + 5373B3D61FC3B3F000C197BD /* CameraUtilExtension.swift */, + 5373B3C91FC3AF2000C197BD /* Delegates */, + ); + path = Extensions; + sourceTree = ""; + }; + 5373B3C91FC3AF2000C197BD /* Delegates */ = { + isa = PBXGroup; + children = ( + 5373B3C31FC386C600C197BD /* FileRecordingExtension.swift */, + 5373B3C51FC387EE00C197BD /* DepthDataExtension.swift */, + 5373B3CA1FC3AFF900C197BD /* VideoDataOutputSampleBufferDelegateExtension.swift */, + 5373B3CC1FC3B07A00C197BD /* PhotoCaptureDelegateExtension.swift */, + 5373B3D01FC3B14D00C197BD /* MetadataOutputDelegateExtension.swift */, + ); + path = Delegates; + sourceTree = ""; + }; + 5373B3DC1FC3B78300C197BD /* UI */ = { + isa = PBXGroup; + children = ( + 53B9CCFA1EBF6C62008E4A43 /* Media.xcassets */, + 53806FA51F672F780009A0E8 /* LuminaButton.swift */, + 
53B9CCFC1EBFD22E008E4A43 /* LuminaTextPromptView.swift */, + 53B8290E1EAAA24600E3A624 /* LuminaViewController.swift */, + 5373B3EC1FC3D65500C197BD /* LuminaCameraViewSettings.swift */, + 5373B3E01FC3D44500C197BD /* Extensions */, + ); + path = UI; + sourceTree = ""; + }; + 5373B3DD1FC3B7B600C197BD /* Util */ = { + isa = PBXGroup; + children = ( + 53B828D31EAAA07F00E3A624 /* Lumina.h */, + 53B828D41EAAA07F00E3A624 /* Info.plist */, + ); + path = Util; + sourceTree = ""; + }; + 5373B3E01FC3D44500C197BD /* Extensions */ = { + isa = PBXGroup; + children = ( + 5373B3E11FC3D47500C197BD /* ViewControllerFocusHandlerExtension.swift */, + 5373B3E31FC3D4DB00C197BD /* ViewControllerButtonFunctions.swift */, + 5373B3EA1FC3D5D700C197BD /* InterfaceHandlerExtension.swift */, + 5373B3E51FC3D4F100C197BD /* Delegates */, + ); + path = Extensions; + sourceTree = ""; + }; + 5373B3E51FC3D4F100C197BD /* Delegates */ = { + isa = PBXGroup; + children = ( + 5373B3E61FC3D50300C197BD /* GestureRecognizerDelegateExtension.swift */, + 5373B3E81FC3D5AA00C197BD /* LuminaCameraDelegateExtension.swift */, + 5373B3EE1FC3D6CD00C197BD /* LuminaDelegate.swift */, + ); + path = Delegates; + sourceTree = ""; + }; 5398482E1EB8F14300F77AEA /* Frameworks */ = { isa = PBXGroup; children = ( @@ -99,15 +221,9 @@ 53B828D21EAAA07F00E3A624 /* Lumina */ = { isa = PBXGroup; children = ( - 53B828D31EAAA07F00E3A624 /* Lumina.h */, - 53B828D41EAAA07F00E3A624 /* Info.plist */, - 53B8290E1EAAA24600E3A624 /* LuminaViewController.swift */, - 53806FA31F6618230009A0E8 /* LuminaCamera.swift */, - 5365BFD51F79941900B8F338 /* LuminaObjectRecognizer.swift */, - 53B9CCFC1EBFD22E008E4A43 /* LuminaTextPromptView.swift */, - 53806FA51F672F780009A0E8 /* LuminaButton.swift */, - 5332084D1F9A99CA008354F7 /* LuminaDeviceUtil.swift */, - 53B9CCFA1EBF6C62008E4A43 /* Media.xcassets */, + 5373B3DD1FC3B7B600C197BD /* Util */, + 5373B3DC1FC3B78300C197BD /* UI */, + 5373B3C11FC3868200C197BD /* Camera */, ); path = Lumina; sourceTree = ""; 
@@ -254,12 +370,32 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 5373B3DB1FC3B4A300C197BD /* CameraActionsExtension.swift in Sources */, 53806FA41F6618230009A0E8 /* LuminaCamera.swift in Sources */, + 5373B3D31FC3B19700C197BD /* FocusHandlerExtension.swift in Sources */, + 5373B3DF1FC3B8B600C197BD /* LuminaPhotoCapture.swift in Sources */, 53B8290F1EAAA24600E3A624 /* LuminaViewController.swift in Sources */, + 5373B3C41FC386C600C197BD /* FileRecordingExtension.swift in Sources */, + 5373B3EF1FC3D6CD00C197BD /* LuminaDelegate.swift in Sources */, + 5373B3ED1FC3D65500C197BD /* LuminaCameraViewSettings.swift in Sources */, + 5373B3EB1FC3D5D700C197BD /* InterfaceHandlerExtension.swift in Sources */, + 5373B3E21FC3D47500C197BD /* ViewControllerFocusHandlerExtension.swift in Sources */, + 5373B3CB1FC3AFF900C197BD /* VideoDataOutputSampleBufferDelegateExtension.swift in Sources */, 53806FA61F672F780009A0E8 /* LuminaButton.swift in Sources */, 53B9CCFD1EBFD22E008E4A43 /* LuminaTextPromptView.swift in Sources */, + 5373B3E91FC3D5AA00C197BD /* LuminaCameraDelegateExtension.swift in Sources */, 5365BFD61F79941900B8F338 /* LuminaObjectRecognizer.swift in Sources */, + 5373B3D71FC3B3F000C197BD /* CameraUtilExtension.swift in Sources */, 5332084E1F9A99CA008354F7 /* LuminaDeviceUtil.swift in Sources */, + 5373B3D91FC3B47E00C197BD /* SessionConfigurationExtension.swift in Sources */, + 5373B3CD1FC3B07A00C197BD /* PhotoCaptureDelegateExtension.swift in Sources */, + 5373B3CF1FC3B0DA00C197BD /* CapturePhotoExtension.swift in Sources */, + 5373B3E71FC3D50300C197BD /* GestureRecognizerDelegateExtension.swift in Sources */, + 5373B3C61FC387EE00C197BD /* DepthDataExtension.swift in Sources */, + 5373B3C81FC3882900C197BD /* SampleBufferExtension.swift in Sources */, + 5373B3D11FC3B14D00C197BD /* MetadataOutputDelegateExtension.swift in Sources */, + 5373B3D51FC3B20200C197BD /* CaptureDeviceHandlerExtension.swift in Sources */, + 5373B3E41FC3D4DB00C197BD /* 
ViewControllerButtonFunctions.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -403,7 +539,7 @@ DYLIB_COMPATIBILITY_VERSION = 1; DYLIB_CURRENT_VERSION = 1; DYLIB_INSTALL_NAME_BASE = "@rpath"; - INFOPLIST_FILE = Lumina/Info.plist; + INFOPLIST_FILE = "$(SRCROOT)/Lumina/Util/Info.plist"; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; @@ -426,7 +562,7 @@ DYLIB_COMPATIBILITY_VERSION = 1; DYLIB_CURRENT_VERSION = 1; DYLIB_INSTALL_NAME_BASE = "@rpath"; - INFOPLIST_FILE = Lumina/Info.plist; + INFOPLIST_FILE = "$(SRCROOT)/Lumina/Util/Info.plist"; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; diff --git a/Lumina/Lumina/Camera/Extensions/CameraActionsExtension.swift b/Lumina/Lumina/Camera/Extensions/CameraActionsExtension.swift new file mode 100644 index 0000000..931499b --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/CameraActionsExtension.swift @@ -0,0 +1,68 @@ +// +// CameraActionsExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation +import AVFoundation + +extension LuminaCamera { + func getPreviewLayer() -> AVCaptureVideoPreviewLayer? { + let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) + previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill + return previewLayer + } + + func captureStillImage() { + var settings = AVCapturePhotoSettings() + if #available(iOS 11.0, *) { + if self.photoOutput.availablePhotoCodecTypes.contains(.hevc) { + settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc]) + } + } + settings.isAutoStillImageStabilizationEnabled = true + settings.flashMode = self.torchState ? 
.on : .off + if self.captureLivePhotos { + let fileName = NSTemporaryDirectory().appending("livePhoto" + Date().iso8601 + ".mov") + settings.livePhotoMovieFileURL = URL(fileURLWithPath: fileName) + } + if self.captureHighResolutionImages { + settings.isHighResolutionPhotoEnabled = true + } + if #available(iOS 11.0, *) { + if self.captureDepthData && self.photoOutput.isDepthDataDeliverySupported { + settings.isDepthDataDeliveryEnabled = true + } + } + self.photoOutput.capturePhoto(with: settings, delegate: self) + } + + func startVideoRecording() { + if self.resolution == .photo { + return // TODO: make this function throw an error + } + recordingVideo = true + sessionQueue.async { + if let connection = self.videoFileOutput.connection(with: AVMediaType.video), let videoConnection = self.videoDataOutput.connection(with: AVMediaType.video) { + connection.videoOrientation = videoConnection.videoOrientation + connection.isVideoMirrored = self.position == .front ? true : false + if connection.isVideoStabilizationSupported { + connection.preferredVideoStabilizationMode = .cinematic + } + self.session.commitConfiguration() + } + let fileName = NSTemporaryDirectory().appending(Date().iso8601 + ".mov") + self.videoFileOutput.startRecording(to: URL(fileURLWithPath: fileName), recordingDelegate: self) + } + } + + func stopVideoRecording() { + recordingVideo = false + sessionQueue.async { + self.videoFileOutput.stopRecording() + } + } +} diff --git a/Lumina/Lumina/Camera/Extensions/CameraUtilExtension.swift b/Lumina/Lumina/Camera/Extensions/CameraUtilExtension.swift new file mode 100644 index 0000000..8329b66 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/CameraUtilExtension.swift @@ -0,0 +1,25 @@ +// +// CameraUtilExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation + +extension Formatter { + static let iso8601: DateFormatter = { + let formatter = DateFormatter() + formatter.calendar = Calendar(identifier: .iso8601) + formatter.locale = Locale(identifier: "en_US_POSIX") + formatter.timeZone = TimeZone(secondsFromGMT: 0) + formatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX" + return formatter + }() +} +extension Date { + var iso8601: String { + return Formatter.iso8601.string(from: self) + } +} diff --git a/Lumina/Lumina/Camera/Extensions/CaptureDeviceHandlerExtension.swift b/Lumina/Lumina/Camera/Extensions/CaptureDeviceHandlerExtension.swift new file mode 100644 index 0000000..235a7df --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/CaptureDeviceHandlerExtension.swift @@ -0,0 +1,122 @@ +// +// CaptureDeviceHandlerExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation +import AVFoundation + +extension LuminaCamera { + func getNewVideoInputDevice() -> AVCaptureDeviceInput? { + do { + guard let device = getDevice(with: self.position == .front ? AVCaptureDevice.Position.front : AVCaptureDevice.Position.back) else { + print("could not find valid AVCaptureDevice") + return nil + } + let input = try AVCaptureDeviceInput(device: device) + return input + } catch { + return nil + } + } + + func getNewAudioInputDevice() -> AVCaptureDeviceInput? 
{ + do { + guard let device = AVCaptureDevice.default(for: AVMediaType.audio) else { + return nil + } + let deviceInput = try AVCaptureDeviceInput(device: device) + return deviceInput + } catch { + return nil + } + } + + func purgeAudioDevices() { + for oldInput in self.session.inputs where oldInput == self.audioInput { + self.session.removeInput(oldInput) + } + } + + func purgeVideoDevices() { + for oldInput in self.session.inputs where oldInput == self.videoInput { + self.session.removeInput(oldInput) + } + for oldOutput in self.session.outputs { + if oldOutput == self.videoDataOutput || oldOutput == self.photoOutput || oldOutput == self.metadataOutput || oldOutput == self.videoFileOutput { + self.session.removeOutput(oldOutput) + } + if let dataOutput = oldOutput as? AVCaptureVideoDataOutput { + self.session.removeOutput(dataOutput) + } + if #available(iOS 11.0, *) { + if let depthOutput = oldOutput as? AVCaptureDepthDataOutput { + self.session.removeOutput(depthOutput) + } + } + } + } + + func getDevice(with position: AVCaptureDevice.Position) -> AVCaptureDevice? 
{ + #if swift(>=4.0.2) + if #available(iOS 11.1, *), position == .front { + if let device = AVCaptureDevice.default(.builtInTrueDepthCamera, for: .video, position: .front) { + return device + } + } + #endif + if #available(iOS 10.2, *), let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: position) { + return device + } else if let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position) { + return device + } + return nil + } + + func configureFrameRate() { + guard let device = self.currentCaptureDevice else { + return + } + for vFormat in device.formats { + let dimensions = CMVideoFormatDescriptionGetDimensions(vFormat.formatDescription) + let ranges = vFormat.videoSupportedFrameRateRanges as [AVFrameRateRange] + guard let frameRate = ranges.first else { + continue + } + if frameRate.maxFrameRate >= Float64(self.frameRate) && + frameRate.minFrameRate <= Float64(self.frameRate) && + self.resolution.getDimensions().width == dimensions.width && + self.resolution.getDimensions().height == dimensions.height && + CMFormatDescriptionGetMediaSubType(vFormat.formatDescription) == 875704422 { // meant for full range 420f + do { + try device.lockForConfiguration() + device.activeFormat = vFormat as AVCaptureDevice.Format + device.activeVideoMinFrameDuration = CMTimeMake(1, Int32(self.frameRate)) + device.activeVideoMaxFrameDuration = CMTimeMake(1, Int32(self.frameRate)) + device.unlockForConfiguration() + break + } catch { + continue + } + } + } + } + + func updateZoom() { + guard let input = self.videoInput else { + return + } + let device = input.device + do { + try device.lockForConfiguration() + let newZoomScale = min(maxZoomScale, max(Float(1.0), min(currentZoomScale, Float(device.activeFormat.videoMaxZoomFactor)))) + device.videoZoomFactor = CGFloat(newZoomScale) + device.unlockForConfiguration() + } catch { + device.unlockForConfiguration() + } + } +} diff --git 
a/Lumina/Lumina/Camera/Extensions/CapturePhotoExtension.swift b/Lumina/Lumina/Camera/Extensions/CapturePhotoExtension.swift new file mode 100644 index 0000000..8f13808 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/CapturePhotoExtension.swift @@ -0,0 +1,35 @@ +// +// CapturePhotoExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation +import AVFoundation + +@available (iOS 11.0, *) +extension AVCapturePhoto { + func normalizedImage(forCameraPosition position: CameraPosition) -> UIImage? { + guard let cgImage = self.cgImageRepresentation() else { + return nil + } + return UIImage(cgImage: cgImage.takeUnretainedValue(), scale: 1.0, orientation: getImageOrientation(forCamera: position)) + } + + private func getImageOrientation(forCamera: CameraPosition) -> UIImageOrientation { + switch UIApplication.shared.statusBarOrientation { + case .landscapeLeft: + return forCamera == .back ? .down : .upMirrored + case .landscapeRight: + return forCamera == .back ? .up : .downMirrored + case .portraitUpsideDown: + return forCamera == .back ? .left : .rightMirrored + case .portrait: + return forCamera == .back ? .right : .leftMirrored + case .unknown: + return forCamera == .back ? .right : .leftMirrored + } + } +} diff --git a/Lumina/Lumina/Camera/Extensions/Delegates/DepthDataExtension.swift b/Lumina/Lumina/Camera/Extensions/Delegates/DepthDataExtension.swift new file mode 100644 index 0000000..a06f924 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/Delegates/DepthDataExtension.swift @@ -0,0 +1,23 @@ +// +// DepthDataExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation +import AVFoundation + +@available(iOS 11.0, *) +extension LuminaCamera: AVCaptureDepthDataOutputDelegate { + func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) { + DispatchQueue.main.async { + self.delegate?.depthDataCaptured(camera: self, depthData: depthData) + } + } + + func depthDataOutput(_ output: AVCaptureDepthDataOutput, didDrop depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection, reason: AVCaptureOutput.DataDroppedReason) { + // place to handle dropped AVDepthData if we need it + } +} diff --git a/Lumina/Lumina/Camera/Extensions/Delegates/FileRecordingExtension.swift b/Lumina/Lumina/Camera/Extensions/Delegates/FileRecordingExtension.swift new file mode 100644 index 0000000..e15e322 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/Delegates/FileRecordingExtension.swift @@ -0,0 +1,51 @@ +// +// LuminaCamera+FileOutputRecordingDelegate.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation +import AVFoundation + +extension LuminaCamera: AVCaptureFileOutputRecordingDelegate { + func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) 
{ + DispatchQueue.main.async { + if error == nil, let delegate = self.delegate { + delegate.videoRecordingCaptured(camera: self, videoURL: outputFileURL) + } + } + } + + func photoOutput(_ output: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) { + if self.captureLivePhotos { + self.delegate?.cameraBeganTakingLivePhoto(camera: self) + } + } + + func photoOutput(_ output: AVCapturePhotoOutput, didFinishRecordingLivePhotoMovieForEventualFileAt outputFileURL: URL, resolvedSettings: AVCaptureResolvedPhotoSettings) { + if self.captureLivePhotos { + self.delegate?.cameraFinishedTakingLivePhoto(camera: self) + } + } + + //swiftlint:disable function_parameter_count + func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingLivePhotoToMovieFileAt outputFileURL: URL, duration: CMTime, photoDisplayTime: CMTime, resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) { + photoCollectionQueue.sync { + if self.currentPhotoCollection == nil { + var collection = LuminaPhotoCapture() + collection.camera = self + collection.livePhotoURL = outputFileURL + self.currentPhotoCollection = collection + } else { + guard var collection = self.currentPhotoCollection else { + return + } + collection.camera = self + collection.livePhotoURL = outputFileURL + self.currentPhotoCollection = collection + } + } + } +} diff --git a/Lumina/Lumina/Camera/Extensions/Delegates/MetadataOutputDelegateExtension.swift b/Lumina/Lumina/Camera/Extensions/Delegates/MetadataOutputDelegateExtension.swift new file mode 100644 index 0000000..81151a3 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/Delegates/MetadataOutputDelegateExtension.swift @@ -0,0 +1,21 @@ +// +// MetadataOutputDelegateExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation +import AVFoundation + +extension LuminaCamera: AVCaptureMetadataOutputObjectsDelegate { + func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) { + guard case self.trackMetadata = true else { + return + } + DispatchQueue.main.async { + self.delegate?.detected(camera: self, metadata: metadataObjects) + } + } +} diff --git a/Lumina/Lumina/Camera/Extensions/Delegates/PhotoCaptureDelegateExtension.swift b/Lumina/Lumina/Camera/Extensions/Delegates/PhotoCaptureDelegateExtension.swift new file mode 100644 index 0000000..94bb835 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/Delegates/PhotoCaptureDelegateExtension.swift @@ -0,0 +1,51 @@ +// +// PhotoCaptureDelegateExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation +import AVFoundation + +extension LuminaCamera: AVCapturePhotoCaptureDelegate { + @available (iOS 11.0, *) + func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) 
{ + guard let image = photo.normalizedImage(forCameraPosition: self.position) else { + return + } + photoCollectionQueue.sync { + if self.currentPhotoCollection == nil { + var collection = LuminaPhotoCapture() + collection.camera = self + collection.depthData = photo.depthData + collection.stillImage = image + self.currentPhotoCollection = collection + } else { + guard var collection = self.currentPhotoCollection else { + return + } + collection.camera = self + collection.depthData = photo.depthData + collection.stillImage = image + self.currentPhotoCollection = collection + } + } + } + + // swiftlint:disable function_parameter_count + func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) { + if #available(iOS 11.0, *) { // make use of AVCapturePhotoOutput + return + } else { + guard let buffer = photoSampleBuffer else { + return + } + guard let image = buffer.normalizedStillImage(forCameraPosition: self.position) else { + return + } + delegate?.stillImageCaptured(camera: self, image: image, livePhotoURL: nil, depthData: nil) + } + } +} diff --git a/Lumina/Lumina/Camera/Extensions/Delegates/VideoDataOutputSampleBufferDelegateExtension.swift b/Lumina/Lumina/Camera/Extensions/Delegates/VideoDataOutputSampleBufferDelegateExtension.swift new file mode 100644 index 0000000..bc27704 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/Delegates/VideoDataOutputSampleBufferDelegateExtension.swift @@ -0,0 +1,35 @@ +// +// VideoDataOutputSampleBufferDelegate.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation +import AVFoundation + +extension LuminaCamera: AVCaptureVideoDataOutputSampleBufferDelegate { + func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + guard let image = sampleBuffer.normalizedVideoFrame() else { + return + } + if #available(iOS 11.0, *) { + guard let recognizer = self.recognizer as? LuminaObjectRecognizer else { + DispatchQueue.main.async { + self.delegate?.videoFrameCaptured(camera: self, frame: image) + } + return + } + recognizer.recognize(from: image, completion: { predictions in + DispatchQueue.main.async { + self.delegate?.videoFrameCaptured(camera: self, frame: image, predictedObjects: predictions) + } + }) + } else { + DispatchQueue.main.async { + self.delegate?.videoFrameCaptured(camera: self, frame: image) + } + } + } +} diff --git a/Lumina/Lumina/Camera/Extensions/FocusHandlerExtension.swift b/Lumina/Lumina/Camera/Extensions/FocusHandlerExtension.swift new file mode 100644 index 0000000..ad53ce8 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/FocusHandlerExtension.swift @@ -0,0 +1,55 @@ +// +// FocusHandlerExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+//
+
+import Foundation
+import AVFoundation
+
+extension LuminaCamera {
+    func handleFocus(at focusPoint: CGPoint) {
+        self.sessionQueue.async {
+            guard let input = self.videoInput else {
+                return
+            }
+            do {
+                if input.device.isFocusModeSupported(.autoFocus) && input.device.isFocusPointOfInterestSupported {
+                    try input.device.lockForConfiguration()
+                    input.device.focusMode = .autoFocus
+                    input.device.focusPointOfInterest = CGPoint(x: focusPoint.x, y: focusPoint.y)
+                    if input.device.isExposureModeSupported(.autoExpose) && input.device.isExposurePointOfInterestSupported {
+                        input.device.exposureMode = .autoExpose
+                        input.device.exposurePointOfInterest = CGPoint(x: focusPoint.x, y: focusPoint.y)
+                    }
+                    input.device.unlockForConfiguration()
+                } else {
+                    self.delegate?.finishedFocus(camera: self)
+                }
+            } catch {
+                self.delegate?.finishedFocus(camera: self)
+            }
+        }
+    }
+
+    func resetCameraToContinuousExposureAndFocus() {
+        do {
+            guard let input = self.videoInput else {
+                print("Trying to focus, but cannot detect device input!")
+                return
+            }
+            if input.device.isFocusModeSupported(.continuousAutoFocus) {
+                try input.device.lockForConfiguration()
+                input.device.focusMode = .continuousAutoFocus
+                if input.device.isExposureModeSupported(.continuousAutoExposure) {
+                    input.device.exposureMode = .continuousAutoExposure
+                }
+                input.device.unlockForConfiguration()
+            }
+        } catch {
+            print("could not reset to continuous auto focus and exposure!!")
+        }
+    }
+}
diff --git a/Lumina/Lumina/Camera/Extensions/SampleBufferExtension.swift b/Lumina/Lumina/Camera/Extensions/SampleBufferExtension.swift
new file mode 100644
index 0000000..44baa99
--- /dev/null
+++ b/Lumina/Lumina/Camera/Extensions/SampleBufferExtension.swift
@@ -0,0 +1,52 @@
+//
+// SampleBufferExtension.swift
+// Lumina
+//
+// Created by David Okun on 11/20/17.
+// Copyright © 2017 David Okun. All rights reserved.
+// + +import Foundation +import AVFoundation + +extension CMSampleBuffer { + func normalizedStillImage(forCameraPosition position: CameraPosition) -> UIImage? { + guard let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: self, previewPhotoSampleBuffer: nil) else { + return nil + } + guard let dataProvider = CGDataProvider(data: imageData as CFData) else { + return nil + } + guard let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent) else { + return nil + } + return UIImage(cgImage: cgImageRef, scale: 1.0, orientation: getImageOrientation(forCamera: position)) + } + + func normalizedVideoFrame() -> UIImage? { + guard let imageBuffer = CMSampleBufferGetImageBuffer(self) else { + return nil + } + let coreImage: CIImage = CIImage(cvPixelBuffer: imageBuffer) + let context: CIContext = CIContext() + guard let sample: CGImage = context.createCGImage(coreImage, from: coreImage.extent) else { + return nil + } + return UIImage(cgImage: sample) + } + + private func getImageOrientation(forCamera: CameraPosition) -> UIImageOrientation { + switch UIApplication.shared.statusBarOrientation { + case .landscapeLeft: + return forCamera == .back ? .down : .upMirrored + case .landscapeRight: + return forCamera == .back ? .up : .downMirrored + case .portraitUpsideDown: + return forCamera == .back ? .left : .rightMirrored + case .portrait: + return forCamera == .back ? .right : .leftMirrored + case .unknown: + return forCamera == .back ? .right : .leftMirrored + } + } +} diff --git a/Lumina/Lumina/Camera/Extensions/SessionConfigurationExtension.swift b/Lumina/Lumina/Camera/Extensions/SessionConfigurationExtension.swift new file mode 100644 index 0000000..21483f8 --- /dev/null +++ b/Lumina/Lumina/Camera/Extensions/SessionConfigurationExtension.swift @@ -0,0 +1,218 @@ +// +// SessionConfigurationExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. 
+// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation +import AVFoundation + +extension LuminaCamera { + func requestVideoPermissions() { + self.sessionQueue.suspend() + AVCaptureDevice.requestAccess(for: .video) { success in + if success { + self.sessionQueue.resume() + self.delegate?.cameraSetupCompleted(camera: self, result: .requiresUpdate) + } else { + self.delegate?.cameraSetupCompleted(camera: self, result: .videoPermissionDenied) + } + } + } + + func requestAudioPermissions() { + self.sessionQueue.suspend() + AVCaptureDevice.requestAccess(for: AVMediaType.audio) { success in + if success { + self.sessionQueue.resume() + self.delegate?.cameraSetupCompleted(camera: self, result: .requiresUpdate) + } else { + self.delegate?.cameraSetupCompleted(camera: self, result: .audioPermissionDenied) + } + } + } + + func updateOutputVideoOrientation(_ orientation: AVCaptureVideoOrientation) { + self.videoBufferQueue.async { + for output in self.session.outputs { + guard let connection = output.connection(with: AVMediaType.video) else { + continue + } + if connection.isVideoOrientationSupported { + connection.videoOrientation = orientation + } + } + } + } + + func restartVideo() { + if self.session.isRunning { + self.session.stopRunning() + updateVideo({ result in + if result == .videoSuccess { + self.start() + } else { + self.delegate?.cameraSetupCompleted(camera: self, result: result) + } + }) + } + } + + func updateAudio(_ completion: @escaping (_ result: CameraSetupResult) -> Void) { + self.sessionQueue.async { + self.purgeAudioDevices() + switch AVCaptureDevice.authorizationStatus(for: AVMediaType.audio) { + case .authorized: + guard let audioInput = self.getNewAudioInputDevice() else { + completion(CameraSetupResult.invalidAudioInput) + return + } + guard self.session.canAddInput(audioInput) else { + completion(CameraSetupResult.invalidAudioInput) + return + } + self.audioInput = audioInput + self.session.addInput(audioInput) + 
completion(CameraSetupResult.audioSuccess) + return + case .denied: + completion(CameraSetupResult.audioPermissionDenied) + return + case .notDetermined: + completion(CameraSetupResult.audioRequiresAuthorization) + return + case .restricted: + completion(CameraSetupResult.audioPermissionRestricted) + return + } + } + } + + func updateVideo(_ completion: @escaping (_ result: CameraSetupResult) -> Void) { + self.sessionQueue.async { + self.purgeVideoDevices() + switch AVCaptureDevice.authorizationStatus(for: AVMediaType.video) { + case .authorized: + completion(self.videoSetupApproved()) + case .denied: + completion(CameraSetupResult.videoPermissionDenied) + return + case .notDetermined: + completion(CameraSetupResult.videoRequiresAuthorization) + return + case .restricted: + completion(CameraSetupResult.videoPermissionRestricted) + return + } + } + } + + private func videoSetupApproved() -> CameraSetupResult { + self.torchState = false + self.session.sessionPreset = .high // set to high here so that device input can be added to session. 
resolution can be checked for update later + guard let videoInput = self.getNewVideoInputDevice() else { + return .invalidVideoInput + } + if let failureResult = checkSessionValidity(for: videoInput) { + return failureResult + } + self.videoInput = videoInput + self.session.addInput(videoInput) + if self.streamFrames { + self.session.addOutput(self.videoDataOutput) + } + self.session.addOutput(self.photoOutput) + self.session.commitConfiguration() + if self.session.canSetSessionPreset(self.resolution.foundationPreset()) { + self.session.sessionPreset = self.resolution.foundationPreset() + } + configureVideoRecordingOutput(for: self.session) + configureMetadataOutput(for: self.session) + configureHiResPhotoOutput(for: self.session) + configureLivePhotoOutput(for: self.session) + configureDepthDataOutput(for: self.session) + configureFrameRate() + return .videoSuccess + } + + private func checkSessionValidity(for input: AVCaptureDeviceInput) -> CameraSetupResult? { + guard self.session.canAddInput(input) else { + return .invalidVideoInput + } + guard self.session.canAddOutput(self.videoDataOutput) else { + return .invalidVideoDataOutput + } + guard self.session.canAddOutput(self.photoOutput) else { + return .invalidPhotoOutput + } + guard self.session.canAddOutput(self.metadataOutput) else { + return .invalidVideoMetadataOutput + } + if self.recordsVideo == true { + guard self.session.canAddOutput(self.videoFileOutput) else { + return .invalidVideoFileOutput + } + } + if #available(iOS 11.0, *), let depthDataOutput = self.depthDataOutput { + guard self.session.canAddOutput(depthDataOutput) else { + return .invalidDepthDataOutput + } + } + return nil + } + + private func configureVideoRecordingOutput(for session: AVCaptureSession) { + if self.recordsVideo { + // adding this invalidates the video data output + self.session.addOutput(self.videoFileOutput) + if let connection = self.videoFileOutput.connection(with: .video) { + if connection.isVideoStabilizationSupported 
{
+                connection.preferredVideoStabilizationMode = .auto
+            }
+        }
+    }
+
+    private func configureHiResPhotoOutput(for session: AVCaptureSession) {
+        if self.captureHighResolutionImages {
+            self.photoOutput.isHighResolutionCaptureEnabled = true
+        } else {
+            self.captureHighResolutionImages = false
+        }
+    }
+
+    private func configureLivePhotoOutput(for session: AVCaptureSession) {
+        if self.captureLivePhotos && self.photoOutput.isLivePhotoCaptureSupported {
+            self.photoOutput.isLivePhotoCaptureEnabled = true
+        } else {
+            self.captureLivePhotos = false
+        }
+    }
+
+    private func configureMetadataOutput(for session: AVCaptureSession) {
+        if self.trackMetadata {
+            session.addOutput(self.metadataOutput)
+            self.metadataOutput.metadataObjectTypes = self.metadataOutput.availableMetadataObjectTypes
+        }
+    }
+
+    private func configureDepthDataOutput(for session: AVCaptureSession) {
+        if #available(iOS 11.0, *) {
+            if self.captureDepthData && self.photoOutput.isDepthDataDeliverySupported {
+                self.photoOutput.isDepthDataDeliveryEnabled = true
+            } else {
+                self.captureDepthData = false
+            }
+        } else {
+            self.captureDepthData = false
+        }
+        if #available(iOS 11.0, *) {
+            if self.streamDepthData, let depthDataOutput = self.depthDataOutput {
+                session.addOutput(depthDataOutput)
+                session.commitConfiguration()
+            }
+        }
+    }
+}
diff --git a/Lumina/Lumina/Camera/LuminaCamera.swift b/Lumina/Lumina/Camera/LuminaCamera.swift
new file mode 100644
index 0000000..a1a4edb
--- /dev/null
+++ b/Lumina/Lumina/Camera/LuminaCamera.swift
@@ -0,0 +1,245 @@
+//
+// Camera.swift
+// CameraFramework
+//
+// Created by David Okun on 8/31/17.
+// Copyright © 2017 David Okun. All rights reserved.
+//
+
+import UIKit
+import AVFoundation
+import CoreML
+
+protocol LuminaCameraDelegate: class {
+    func stillImageCaptured(camera: LuminaCamera, image: UIImage, livePhotoURL: URL?, depthData: Any?)
+ func videoFrameCaptured(camera: LuminaCamera, frame: UIImage) + func videoFrameCaptured(camera: LuminaCamera, frame: UIImage, predictedObjects: [LuminaPrediction]?) + func depthDataCaptured(camera: LuminaCamera, depthData: Any) + func videoRecordingCaptured(camera: LuminaCamera, videoURL: URL) + func finishedFocus(camera: LuminaCamera) + func detected(camera: LuminaCamera, metadata: [Any]) + func cameraSetupCompleted(camera: LuminaCamera, result: CameraSetupResult) + func cameraBeganTakingLivePhoto(camera: LuminaCamera) + func cameraFinishedTakingLivePhoto(camera: LuminaCamera) +} + +enum CameraSetupResult: String { + typealias RawValue = String + case videoPermissionDenied = "Video Permissions Denied" + case videoPermissionRestricted = "Video Permissions Restricted" + case videoRequiresAuthorization = "Video Permissions Require Authorization" + case audioPermissionDenied = "Audio Permissions Denied" + case audioPermissionRestricted = "Audio Permissions Restricted" + case audioRequiresAuthorization = "Audio Permissions Require Authorization" + case unknownError = "Unknown Error" + case invalidVideoDataOutput = "Invalid Video Data Output" + case invalidVideoFileOutput = "Invalid Video File Output" + case invalidVideoMetadataOutput = "Invalid Video Metadata Output" + case invalidPhotoOutput = "Invalid Photo Output" + case invalidDepthDataOutput = "Invalid Depth Data Output" + case invalidVideoInput = "Invalid Video Input" + case invalidAudioInput = "Invalid Audio Input" + case requiresUpdate = "Requires AV Update" + case videoSuccess = "Video Setup Success" + case audioSuccess = "Audio Setup Success" +} + +final class LuminaCamera: NSObject { + weak var delegate: LuminaCameraDelegate? 
+ + var torchState = false { + didSet { + guard let input = self.videoInput else { + torchState = false + return + } + do { + try input.device.lockForConfiguration() + if torchState == false { + if input.device.isTorchModeSupported(.off) { + input.device.torchMode = .off + input.device.unlockForConfiguration() + } + } else { + if input.device.isTorchModeSupported(.on) { + input.device.torchMode = .on + input.device.unlockForConfiguration() + } + } + } catch { + torchState = false + input.device.unlockForConfiguration() + } + } + } + + var recordsVideo = false { + didSet { + restartVideo() + } + } + + var streamFrames = false { + didSet { + restartVideo() + } + } + + var trackMetadata = false { + didSet { + restartVideo() + } + } + + var captureLivePhotos = false { + didSet { + restartVideo() + } + } + + var captureDepthData = false { + didSet { + restartVideo() + } + } + + var streamDepthData = false { + didSet { + restartVideo() + } + } + + var captureHighResolutionImages = false { + didSet { + restartVideo() + } + } + + var recordingVideo: Bool = false + + var position: CameraPosition = .back { + didSet { + restartVideo() + } + } + + var resolution: CameraResolution = .highest { + didSet { + restartVideo() + } + } + + var frameRate: Int = 30 { + didSet { + restartVideo() + } + } + + var maxZoomScale: Float = MAXFLOAT + + var currentZoomScale: Float = 1.0 { + didSet { + updateZoom() + } + } + + var currentPhotoCollection: LuminaPhotoCapture? + + var recognizer: AnyObject? + + private var _streamingModel: AnyObject? + @available(iOS 11.0, *) + var streamingModel: MLModel? { + get { + return _streamingModel as? MLModel + } + set { + if newValue != nil { + _streamingModel = newValue + recognizer = LuminaObjectRecognizer(model: newValue!) + } + } + } + + var session = AVCaptureSession() + + fileprivate var discoverySession: AVCaptureDevice.DiscoverySession? 
{ + var deviceTypes = [AVCaptureDevice.DeviceType]() + deviceTypes.append(.builtInWideAngleCamera) + if #available(iOS 10.2, *) { + deviceTypes.append(.builtInDualCamera) + } + #if swift(>=4.0.2) // Xcode 9.1 shipped with Swift 4.0.2 + if #available(iOS 11.1, *), self.captureDepthData == true { + deviceTypes.append(.builtInTrueDepthCamera) + } + #endif + return AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified) + } + + var videoInput: AVCaptureDeviceInput? + var audioInput: AVCaptureDeviceInput? + var currentCaptureDevice: AVCaptureDevice? + var videoBufferQueue = DispatchQueue(label: "com.Lumina.videoBufferQueue", attributes: .concurrent) + var metadataBufferQueue = DispatchQueue(label: "com.lumina.metadataBufferQueue") + var recognitionBufferQueue = DispatchQueue(label: "com.lumina.recognitionBufferQueue") + var sessionQueue = DispatchQueue(label: "com.lumina.sessionQueue") + var photoCollectionQueue = DispatchQueue(label: "com.lumina.photoCollectionQueue") + var depthDataQueue = DispatchQueue(label: "com.lumina.depthDataQueue") + + var videoDataOutput: AVCaptureVideoDataOutput { + let output = AVCaptureVideoDataOutput() + output.alwaysDiscardsLateVideoFrames = true + output.setSampleBufferDelegate(self, queue: videoBufferQueue) + return output + } + var photoOutput = AVCapturePhotoOutput() + + private var _metadataOutput: AVCaptureMetadataOutput? + var metadataOutput: AVCaptureMetadataOutput { + if let existingOutput = _metadataOutput { + return existingOutput + } + let output = AVCaptureMetadataOutput() + output.setMetadataObjectsDelegate(self, queue: metadataBufferQueue) + _metadataOutput = output + return output + } + + private var _videoFileOutput: AVCaptureMovieFileOutput? 
+ var videoFileOutput: AVCaptureMovieFileOutput { + if let existingOutput = _videoFileOutput { + return existingOutput + } + let output = AVCaptureMovieFileOutput() + _videoFileOutput = output + return output + } + + private var _depthDataOutput: AnyObject? + @available(iOS 11.0, *) + var depthDataOutput: AVCaptureDepthDataOutput? { + get { + if let existingOutput = _depthDataOutput { + return existingOutput as? AVCaptureDepthDataOutput + } + let output = AVCaptureDepthDataOutput() + output.setDelegate(self, callbackQueue: depthDataQueue) + _depthDataOutput = output + return output + } + set { + _depthDataOutput = newValue + } + } + + func start() { + self.sessionQueue.async { + self.session.startRunning() + } + } + + func stop() { + self.session.stopRunning() + } +} diff --git a/Lumina/Lumina/LuminaDeviceUtil.swift b/Lumina/Lumina/Camera/LuminaDeviceUtil.swift similarity index 100% rename from Lumina/Lumina/LuminaDeviceUtil.swift rename to Lumina/Lumina/Camera/LuminaDeviceUtil.swift diff --git a/Lumina/Lumina/LuminaObjectRecognizer.swift b/Lumina/Lumina/Camera/LuminaObjectRecognizer.swift similarity index 100% rename from Lumina/Lumina/LuminaObjectRecognizer.swift rename to Lumina/Lumina/Camera/LuminaObjectRecognizer.swift diff --git a/Lumina/Lumina/Camera/LuminaPhotoCapture.swift b/Lumina/Lumina/Camera/LuminaPhotoCapture.swift new file mode 100644 index 0000000..4fe1551 --- /dev/null +++ b/Lumina/Lumina/Camera/LuminaPhotoCapture.swift @@ -0,0 +1,66 @@ +// +// LuminaPhotoCapture.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation +import AVFoundation + +struct LuminaPhotoCapture { + var camera: LuminaCamera? + + var stillImage: UIImage? { + didSet { + collectionUpdated() + } + } + + var livePhotoURL: URL? { + didSet { + collectionUpdated() + } + } + + private var _depthData: Any? + @available(iOS 11.0, *) + var depthData: AVDepthData? { + get { + return _depthData as? 
AVDepthData + } + set { + if newValue != nil { + _depthData = newValue + collectionUpdated() + } + } + } + + fileprivate func collectionUpdated() { + var sendingLivePhotoURL: URL? + var sendingDepthData: Any? + guard let sendingCamera = camera, let image = stillImage else { + return + } + if sendingCamera.captureLivePhotos == true { + if let url = livePhotoURL { + sendingLivePhotoURL = url + } else { + return + } + } + + if sendingCamera.captureDepthData == true, #available(iOS 11.0, *) { + if let data = depthData { + sendingDepthData = data + } else { + return + } + } + DispatchQueue.main.async { + sendingCamera.delegate?.stillImageCaptured(camera: sendingCamera, image: image, livePhotoURL: sendingLivePhotoURL, depthData: sendingDepthData) + } + } +} diff --git a/Lumina/Lumina/LuminaCamera.swift b/Lumina/Lumina/LuminaCamera.swift deleted file mode 100644 index a291bd6..0000000 --- a/Lumina/Lumina/LuminaCamera.swift +++ /dev/null @@ -1,941 +0,0 @@ -// -// Camera.swift -// CameraFramework -// -// Created by David Okun on 8/31/17. -// Copyright © 2017 David Okun. All rights reserved. -// - -import UIKit -import AVFoundation -import CoreML - -protocol LuminaCameraDelegate: class { - func stillImageCaptured(camera: LuminaCamera, image: UIImage, livePhotoURL: URL?, depthData: Any?) - func videoFrameCaptured(camera: LuminaCamera, frame: UIImage) - func videoFrameCaptured(camera: LuminaCamera, frame: UIImage, predictedObjects: [LuminaPrediction]?) 
- func depthDataCaptured(camera: LuminaCamera, depthData: Any) - func videoRecordingCaptured(camera: LuminaCamera, videoURL: URL) - func finishedFocus(camera: LuminaCamera) - func detected(camera: LuminaCamera, metadata: [Any]) - func cameraSetupCompleted(camera: LuminaCamera, result: CameraSetupResult) - func cameraBeganTakingLivePhoto(camera: LuminaCamera) - func cameraFinishedTakingLivePhoto(camera: LuminaCamera) -} - -enum CameraSetupResult: String { - typealias RawValue = String - case videoPermissionDenied = "Video Permissions Denied" - case videoPermissionRestricted = "Video Permissions Restricted" - case videoRequiresAuthorization = "Video Permissions Require Authorization" - case audioPermissionDenied = "Audio Permissions Denied" - case audioPermissionRestricted = "Audio Permissions Restricted" - case audioRequiresAuthorization = "Audio Permissions Require Authorization" - case unknownError = "Unknown Error" - case invalidVideoDataOutput = "Invalid Video Data Output" - case invalidVideoFileOutput = "Invalid Video File Output" - case invalidVideoMetadataOutput = "Invalid Video Metadata Output" - case invalidPhotoOutput = "Invalid Photo Output" - case invalidDepthDataOutput = "Invalid Depth Data Output" - case invalidVideoInput = "Invalid Video Input" - case invalidAudioInput = "Invalid Audio Input" - case requiresUpdate = "Requires AV Update" - case videoSuccess = "Video Setup Success" - case audioSuccess = "Audio Setup Success" -} - -struct LuminaPhotoCapture { - var camera: LuminaCamera? - - var stillImage: UIImage? { - didSet { - collectionUpdated() - } - } - - var livePhotoURL: URL? { - didSet { - collectionUpdated() - } - } - - private var _depthData: Any? - @available(iOS 11.0, *) - var depthData: AVDepthData? { - get { - return _depthData as? AVDepthData - } - set { - if newValue != nil { - _depthData = newValue - collectionUpdated() - } - } - } - - fileprivate func collectionUpdated() { - var sendingLivePhotoURL: URL? - var sendingDepthData: Any? 
- guard let sendingCamera = camera, let image = stillImage else { - return - } - if sendingCamera.captureLivePhotos == true { - if let url = livePhotoURL { - sendingLivePhotoURL = url - } else { - return - } - } - - if sendingCamera.captureDepthData == true, #available(iOS 11.0, *) { - if let data = depthData { - sendingDepthData = data - } else { - return - } - } - DispatchQueue.main.async { - sendingCamera.delegate?.stillImageCaptured(camera: sendingCamera, image: image, livePhotoURL: sendingLivePhotoURL, depthData: sendingDepthData) - } - } -} - -final class LuminaCamera: NSObject { - weak var delegate: LuminaCameraDelegate? - - var torchState = false { - didSet { - guard let input = self.videoInput else { - torchState = false - return - } - do { - try input.device.lockForConfiguration() - if torchState == false { - if input.device.isTorchModeSupported(.off) { - input.device.torchMode = .off - input.device.unlockForConfiguration() - } - } else { - if input.device.isTorchModeSupported(.on) { - input.device.torchMode = .on - input.device.unlockForConfiguration() - } - } - } catch { - torchState = false - input.device.unlockForConfiguration() - } - } - } - - var recordsVideo = false { - didSet { - restartVideo() - } - } - - var streamFrames = false { - didSet { - restartVideo() - } - } - - var trackMetadata = false { - didSet { - restartVideo() - } - } - - var captureLivePhotos = false { - didSet { - restartVideo() - } - } - - var captureDepthData = false { - didSet { - restartVideo() - } - } - - var streamDepthData = false { - didSet { - restartVideo() - } - } - - var captureHighResolutionImages = false { - didSet { - restartVideo() - } - } - - private(set) var recordingVideo: Bool = false - - var position: CameraPosition = .back { - didSet { - restartVideo() - } - } - - var resolution: CameraResolution = .highest { - didSet { - restartVideo() - } - } - - var frameRate: Int = 30 { - didSet { - restartVideo() - } - } - - private func restartVideo() { - if 
self.session.isRunning { - self.session.stopRunning() - updateVideo({ result in - if result == .videoSuccess { - self.start() - } else { - self.delegate?.cameraSetupCompleted(camera: self, result: result) - } - }) - } - } - - var maxZoomScale: Float = MAXFLOAT - - var currentZoomScale: Float = 1.0 { - didSet { - updateZoom() - } - } - - var currentPhotoCollection: LuminaPhotoCapture? - - fileprivate var recognizer: AnyObject? - - private var _streamingModel: AnyObject? - @available(iOS 11.0, *) - var streamingModel: MLModel? { - get { - return _streamingModel as? MLModel - } - set { - if newValue != nil { - _streamingModel = newValue - recognizer = LuminaObjectRecognizer(model: newValue!) - } - } - } - - fileprivate var session = AVCaptureSession() - fileprivate var discoverySession: AVCaptureDevice.DiscoverySession? { - var deviceTypes = [AVCaptureDevice.DeviceType]() - deviceTypes.append(.builtInWideAngleCamera) - if #available(iOS 10.2, *) { - deviceTypes.append(.builtInDualCamera) - } - #if swift(>=4.0.2) // Xcode 9.1 shipped with Swift 4.0.2 - if #available(iOS 11.1, *), self.captureDepthData == true { - deviceTypes.append(.builtInTrueDepthCamera) - } - #endif - return AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified) - } - fileprivate var videoInput: AVCaptureDeviceInput? - fileprivate var audioInput: AVCaptureDeviceInput? - fileprivate var currentCaptureDevice: AVCaptureDevice? 
- fileprivate var videoBufferQueue = DispatchQueue(label: "com.Lumina.videoBufferQueue", attributes: .concurrent) - fileprivate var metadataBufferQueue = DispatchQueue(label: "com.lumina.metadataBufferQueue") - fileprivate var recognitionBufferQueue = DispatchQueue(label: "com.lumina.recognitionBufferQueue") - fileprivate var sessionQueue = DispatchQueue(label: "com.lumina.sessionQueue") - fileprivate var photoCollectionQueue = DispatchQueue(label: "com.lumina.photoCollectionQueue") - fileprivate var depthDataQueue = DispatchQueue(label: "com.lumina.depthDataQueue") - - fileprivate var videoDataOutput: AVCaptureVideoDataOutput { - let output = AVCaptureVideoDataOutput() - output.alwaysDiscardsLateVideoFrames = true - output.setSampleBufferDelegate(self, queue: videoBufferQueue) - return output - } - fileprivate var photoOutput = AVCapturePhotoOutput() - - private var _metadataOutput: AVCaptureMetadataOutput? - fileprivate var metadataOutput: AVCaptureMetadataOutput { - if let existingOutput = _metadataOutput { - return existingOutput - } - let output = AVCaptureMetadataOutput() - output.setMetadataObjectsDelegate(self, queue: metadataBufferQueue) - _metadataOutput = output - return output - } - - private var _videoFileOutput: AVCaptureMovieFileOutput? - fileprivate var videoFileOutput: AVCaptureMovieFileOutput { - if let existingOutput = _videoFileOutput { - return existingOutput - } - let output = AVCaptureMovieFileOutput() - _videoFileOutput = output - return output - } - - private var _depthDataOutput: AnyObject? - @available(iOS 11.0, *) - fileprivate var depthDataOutput: AVCaptureDepthDataOutput? { - get { - if let existingOutput = _depthDataOutput { - return existingOutput as? AVCaptureDepthDataOutput - } - let output = AVCaptureDepthDataOutput() - output.setDelegate(self, callbackQueue: depthDataQueue) - _depthDataOutput = output - return output - } - set { - _depthDataOutput = newValue - } - } - - func getPreviewLayer() -> AVCaptureVideoPreviewLayer? 
{ - let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) - previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill - return previewLayer - } - - func captureStillImage() { - var settings = AVCapturePhotoSettings() - if #available(iOS 11.0, *) { - if self.photoOutput.availablePhotoCodecTypes.contains(.hevc) { - settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc]) - } - } - settings.isAutoStillImageStabilizationEnabled = true - settings.flashMode = self.torchState ? .on : .off - if self.captureLivePhotos { - let fileName = NSTemporaryDirectory().appending("livePhoto" + Date().iso8601 + ".mov") - settings.livePhotoMovieFileURL = URL(fileURLWithPath: fileName) - } - if self.captureHighResolutionImages { - settings.isHighResolutionPhotoEnabled = true - } - if #available(iOS 11.0, *) { - if self.captureDepthData && self.photoOutput.isDepthDataDeliverySupported { - settings.isDepthDataDeliveryEnabled = true - } - } - self.photoOutput.capturePhoto(with: settings, delegate: self) - } - - func startVideoRecording() { - recordingVideo = true - sessionQueue.async { - if let connection = self.videoFileOutput.connection(with: AVMediaType.video), let videoConnection = self.videoDataOutput.connection(with: AVMediaType.video) { - connection.videoOrientation = videoConnection.videoOrientation - connection.isVideoMirrored = self.position == .front ? 
true : false - if connection.isVideoStabilizationSupported { - connection.preferredVideoStabilizationMode = .cinematic - } - self.session.commitConfiguration() - } - let fileName = NSTemporaryDirectory().appending(Date().iso8601 + ".mov") - self.videoFileOutput.startRecording(to: URL(fileURLWithPath: fileName), recordingDelegate: self) - } - } - - func stopVideoRecording() { - recordingVideo = false - sessionQueue.async { - self.videoFileOutput.stopRecording() - } - } - - func updateOutputVideoOrientation(_ orientation: AVCaptureVideoOrientation) { - self.videoBufferQueue.async { - for output in self.session.outputs { - guard let connection = output.connection(with: AVMediaType.video) else { - continue - } - if connection.isVideoOrientationSupported { - connection.videoOrientation = orientation - } - } - } - } - - func updateAudio(_ completion: @escaping (_ result: CameraSetupResult) -> Void) { - self.sessionQueue.async { - self.purgeAudioDevices() - switch AVCaptureDevice.authorizationStatus(for: AVMediaType.audio) { - case .authorized: - guard let audioInput = self.getNewAudioInputDevice() else { - completion(CameraSetupResult.invalidAudioInput) - return - } - guard self.session.canAddInput(audioInput) else { - completion(CameraSetupResult.invalidAudioInput) - return - } - self.audioInput = audioInput - self.session.addInput(audioInput) - completion(CameraSetupResult.audioSuccess) - return - case .denied: - completion(CameraSetupResult.audioPermissionDenied) - return - case .notDetermined: - completion(CameraSetupResult.audioRequiresAuthorization) - return - case .restricted: - completion(CameraSetupResult.audioPermissionRestricted) - return - } - } - } - - func updateVideo(_ completion: @escaping (_ result: CameraSetupResult) -> Void) { - self.sessionQueue.async { - self.purgeVideoDevices() - switch AVCaptureDevice.authorizationStatus(for: AVMediaType.video) { - case .authorized: - completion(self.videoSetupApproved()) - case .denied: - 
completion(CameraSetupResult.videoPermissionDenied) - return - case .notDetermined: - completion(CameraSetupResult.videoRequiresAuthorization) - return - case .restricted: - completion(CameraSetupResult.videoPermissionRestricted) - return - } - } - } - - private func videoSetupApproved() -> CameraSetupResult { - self.torchState = false - self.session.sessionPreset = .high // set to high here so that device input can be added to session. resolution can be checked for update later - guard let videoInput = self.getNewVideoInputDevice() else { - return .invalidVideoInput - } - guard self.session.canAddInput(videoInput) else { - return .invalidVideoInput - } - guard self.session.canAddOutput(self.videoDataOutput) else { - return .invalidVideoDataOutput - } - guard self.session.canAddOutput(self.photoOutput) else { - return .invalidPhotoOutput - } - guard self.session.canAddOutput(self.metadataOutput) else { - return .invalidVideoMetadataOutput - } - if self.recordsVideo == true { - guard self.session.canAddOutput(self.videoFileOutput) else { - return .invalidVideoFileOutput - } - } - if #available(iOS 11.0, *), let depthDataOutput = self.depthDataOutput { - guard self.session.canAddOutput(depthDataOutput) else { - return .invalidDepthDataOutput - } - } - self.videoInput = videoInput - self.session.addInput(videoInput) - if self.streamFrames { - self.session.addOutput(self.videoDataOutput) - } - self.session.addOutput(self.photoOutput) - self.session.commitConfiguration() - if self.session.canSetSessionPreset(self.resolution.foundationPreset()) { - self.session.sessionPreset = self.resolution.foundationPreset() - } - configureVideoRecordingOutput(for: self.session) - configureMetadataOutput(for: self.session) - configureHiResPhotoOutput(for: self.session) - configureLivePhotoOutput(for: self.session) - configureDepthDataOutput(for: self.session) - configureFrameRate() - return .videoSuccess - } - - private func configureVideoRecordingOutput(for session: AVCaptureSession) 
{ - if self.recordsVideo { - // adding this invalidates the video data output - self.session.addOutput(self.videoFileOutput) - if let connection = self.videoFileOutput.connection(with: .video) { - if connection.isVideoStabilizationSupported { - connection.preferredVideoStabilizationMode = .auto - } - } - } - } - - private func configureHiResPhotoOutput(for session: AVCaptureSession) { - if self.captureHighResolutionImages && self.photoOutput.isHighResolutionCaptureEnabled { - self.photoOutput.isHighResolutionCaptureEnabled = true - } else { - self.captureHighResolutionImages = false - } - } - - private func configureLivePhotoOutput(for session: AVCaptureSession) { - if self.captureLivePhotos && self.photoOutput.isLivePhotoCaptureSupported { - self.photoOutput.isLivePhotoCaptureEnabled = true - } else { - self.captureLivePhotos = false - } - } - - private func configureMetadataOutput(for session: AVCaptureSession) { - if self.trackMetadata { - session.addOutput(self.metadataOutput) - self.metadataOutput.metadataObjectTypes = self.metadataOutput.availableMetadataObjectTypes - } - } - - private func configureDepthDataOutput(for session: AVCaptureSession) { - if #available(iOS 11.0, *) { - if self.captureDepthData && self.photoOutput.isDepthDataDeliverySupported { - self.photoOutput.isDepthDataDeliveryEnabled = true - } else { - self.captureDepthData = false - } - } else { - self.captureDepthData = false - } - if #available(iOS 11.0, *) { - if self.streamDepthData, let depthDataOutput = self.depthDataOutput { - session.addOutput(depthDataOutput) - session.commitConfiguration() - } - } - } - - func start() { - self.sessionQueue.async { - self.session.startRunning() - } - } - - func pause() { - self.session.stopRunning() - } - - func requestVideoPermissions() { - self.sessionQueue.suspend() - AVCaptureDevice.requestAccess(for: .video) { success in - if success { - self.sessionQueue.resume() - self.delegate?.cameraSetupCompleted(camera: self, result: .requiresUpdate) - } 
else { - self.delegate?.cameraSetupCompleted(camera: self, result: .videoPermissionDenied) - } - } - } - - func requestAudioPermissions() { - self.sessionQueue.suspend() - AVCaptureDevice.requestAccess(for: AVMediaType.audio) { success in - if success { - self.sessionQueue.resume() - self.delegate?.cameraSetupCompleted(camera: self, result: .requiresUpdate) - } else { - self.delegate?.cameraSetupCompleted(camera: self, result: .audioPermissionDenied) - } - } - } -} - -// MARK: Zoom Handling - -fileprivate extension LuminaCamera { - func updateZoom() { - guard let input = self.videoInput else { - return - } - let device = input.device - do { - try device.lockForConfiguration() - let newZoomScale = min(maxZoomScale, max(Float(1.0), min(currentZoomScale, Float(device.activeFormat.videoMaxZoomFactor)))) - device.videoZoomFactor = CGFloat(newZoomScale) - device.unlockForConfiguration() - } catch { - device.unlockForConfiguration() - } - } -} - -// MARK: Focus Handling - -extension LuminaCamera { - func handleFocus(at focusPoint: CGPoint) { - self.sessionQueue.async { - guard let input = self.videoInput else { - return - } - do { - if input.device.isFocusModeSupported(.autoFocus) && input.device.isFocusPointOfInterestSupported { - try input.device.lockForConfiguration() - input.device.focusMode = .autoFocus - input.device.focusPointOfInterest = CGPoint(x: focusPoint.x, y: focusPoint.y) - if input.device.isExposureModeSupported(.autoExpose) && input.device.isExposurePointOfInterestSupported { - input.device.exposureMode = .autoExpose - input.device.exposurePointOfInterest = CGPoint(x: focusPoint.x, y: focusPoint.y) - } - input.device.unlockForConfiguration() - } else { - self.delegate?.finishedFocus(camera: self) - } - } catch { - self.delegate?.finishedFocus(camera: self) - } - } - } - - func resetCameraToContinuousExposureAndFocus() { - do { - guard let input = self.videoInput else { - print("Trying to focus, but cannot detect device input!") - return - } - if 
input.device.isFocusModeSupported(.continuousAutoFocus) { - try input.device.lockForConfiguration() - input.device.focusMode = .autoFocus - if input.device.isExposureModeSupported(.continuousAutoExposure) { - input.device.exposureMode = .continuousAutoExposure - } - input.device.unlockForConfiguration() - } - } catch { - print("could not reset to continuous auto focus and exposure!!") - } - } -} - -// MARK: CaptureDevice Handling - -private extension LuminaCamera { - func getNewVideoInputDevice() -> AVCaptureDeviceInput? { - do { - guard let device = getDevice(with: self.position == .front ? AVCaptureDevice.Position.front : AVCaptureDevice.Position.back) else { - print("could not find valid AVCaptureDevice") - return nil - } - let input = try AVCaptureDeviceInput(device: device) - return input - } catch { - return nil - } - } - - func getNewAudioInputDevice() -> AVCaptureDeviceInput? { - do { - guard let device = AVCaptureDevice.default(for: AVMediaType.audio) else { - return nil - } - let deviceInput = try AVCaptureDeviceInput(device: device) - return deviceInput - } catch { - return nil - } - } - - func purgeAudioDevices() { - for oldInput in self.session.inputs where oldInput == self.audioInput { - self.session.removeInput(oldInput) - } - } - - func purgeVideoDevices() { - for oldInput in self.session.inputs where oldInput == self.videoInput { - self.session.removeInput(oldInput) - } - for oldOutput in self.session.outputs { - if oldOutput == self.videoDataOutput || oldOutput == self.photoOutput || oldOutput == self.metadataOutput || oldOutput == self.videoFileOutput { - self.session.removeOutput(oldOutput) - } - if let dataOutput = oldOutput as? AVCaptureVideoDataOutput { - self.session.removeOutput(dataOutput) - } - if #available(iOS 11.0, *) { - if let depthOutput = oldOutput as? AVCaptureDepthDataOutput { - self.session.removeOutput(depthOutput) - } - } - } - } - - func getDevice(with position: AVCaptureDevice.Position) -> AVCaptureDevice? 
{ - #if swift(>=4.0.2) - if #available(iOS 11.1, *), position == .front { - if let device = AVCaptureDevice.default(.builtInTrueDepthCamera, for: .video, position: .front) { - return device - } - } - #endif - if #available(iOS 10.2, *), let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: position) { - return device - } else if let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position) { - return device - } - return nil - } - - func configureFrameRate() { - guard let device = self.currentCaptureDevice else { - return - } - for vFormat in device.formats { - let dimensions = CMVideoFormatDescriptionGetDimensions(vFormat.formatDescription) - let ranges = vFormat.videoSupportedFrameRateRanges as [AVFrameRateRange] - guard let frameRate = ranges.first else { - continue - } - if frameRate.maxFrameRate >= Float64(self.frameRate) && - frameRate.minFrameRate <= Float64(self.frameRate) && - self.resolution.getDimensions().width == dimensions.width && - self.resolution.getDimensions().height == dimensions.height && - CMFormatDescriptionGetMediaSubType(vFormat.formatDescription) == 875704422 { // meant for full range 420f - do { - try device.lockForConfiguration() - device.activeFormat = vFormat as AVCaptureDevice.Format - device.activeVideoMinFrameDuration = CMTimeMake(1, Int32(self.frameRate)) - device.activeVideoMaxFrameDuration = CMTimeMake(1, Int32(self.frameRate)) - device.unlockForConfiguration() - break - } catch { - continue - } - } - } - } -} - -// MARK: Still Photo Capture - -extension LuminaCamera: AVCapturePhotoCaptureDelegate { - @available (iOS 11.0, *) - func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) 
{ - guard let image = photo.normalizedImage(forCameraPosition: self.position) else { - return - } - photoCollectionQueue.sync { - if self.currentPhotoCollection == nil { - var collection = LuminaPhotoCapture() - collection.camera = self - collection.depthData = photo.depthData - collection.stillImage = image - self.currentPhotoCollection = collection - } else { - guard var collection = self.currentPhotoCollection else { - return - } - collection.camera = self - collection.depthData = photo.depthData - collection.stillImage = image - self.currentPhotoCollection = collection - } - } - } - - func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) { - if #available(iOS 11.0, *) { // make use of AVCapturePhotoOutput - return - } else { - guard let buffer = photoSampleBuffer else { - return - } - guard let image = buffer.normalizedStillImage(forCameraPosition: self.position) else { - return - } - delegate?.stillImageCaptured(camera: self, image: image, livePhotoURL: nil, depthData: nil) - } - } -} - -// MARK: AVCapturePhoto Methods -@available (iOS 11.0, *) -extension AVCapturePhoto { - func normalizedImage(forCameraPosition position: CameraPosition) -> UIImage? { - guard let cgImage = self.cgImageRepresentation() else { - return nil - } - return UIImage(cgImage: cgImage.takeUnretainedValue(), scale: 1.0, orientation: getImageOrientation(forCamera: position)) - } - - private func getImageOrientation(forCamera: CameraPosition) -> UIImageOrientation { - switch UIApplication.shared.statusBarOrientation { - case .landscapeLeft: - return forCamera == .back ? .down : .upMirrored - case .landscapeRight: - return forCamera == .back ? .up : .downMirrored - case .portraitUpsideDown: - return forCamera == .back ? 
.left : .rightMirrored - case .portrait: - return forCamera == .back ? .right : .leftMirrored - case .unknown: - return forCamera == .back ? .right : .leftMirrored - } - } -} - -// MARK: Video Frame Streaming - -extension LuminaCamera: AVCaptureVideoDataOutputSampleBufferDelegate { - func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - guard let image = sampleBuffer.normalizedVideoFrame() else { - return - } - if #available(iOS 11.0, *) { - guard let recognizer = self.recognizer as? LuminaObjectRecognizer else { - DispatchQueue.main.async { - self.delegate?.videoFrameCaptured(camera: self, frame: image) - } - return - } - recognizer.recognize(from: image, completion: { predictions in - DispatchQueue.main.async { - self.delegate?.videoFrameCaptured(camera: self, frame: image, predictedObjects: predictions) - } - }) - } else { - DispatchQueue.main.async { - self.delegate?.videoFrameCaptured(camera: self, frame: image) - } - } - } -} - -// MARK: Image Normalization Methods - -extension CMSampleBuffer { - func normalizedStillImage(forCameraPosition position: CameraPosition) -> UIImage? { - guard let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: self, previewPhotoSampleBuffer: nil) else { - return nil - } - guard let dataProvider = CGDataProvider(data: imageData as CFData) else { - return nil - } - guard let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent) else { - return nil - } - return UIImage(cgImage: cgImageRef, scale: 1.0, orientation: getImageOrientation(forCamera: position)) - } - - func normalizedVideoFrame() -> UIImage? 
{ - guard let imageBuffer = CMSampleBufferGetImageBuffer(self) else { - return nil - } - let coreImage: CIImage = CIImage(cvPixelBuffer: imageBuffer) - let context: CIContext = CIContext() - guard let sample: CGImage = context.createCGImage(coreImage, from: coreImage.extent) else { - return nil - } - return UIImage(cgImage: sample) - } - - private func getImageOrientation(forCamera: CameraPosition) -> UIImageOrientation { - switch UIApplication.shared.statusBarOrientation { - case .landscapeLeft: - return forCamera == .back ? .down : .upMirrored - case .landscapeRight: - return forCamera == .back ? .up : .downMirrored - case .portraitUpsideDown: - return forCamera == .back ? .left : .rightMirrored - case .portrait: - return forCamera == .back ? .right : .leftMirrored - case .unknown: - return forCamera == .back ? .right : .leftMirrored - } - } -} - -// MARK: MetadataOutput Delegate Methods - -extension LuminaCamera: AVCaptureMetadataOutputObjectsDelegate { - func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) { - guard case self.trackMetadata = true else { - return - } - DispatchQueue.main.async { - self.delegate?.detected(camera: self, metadata: metadataObjects) - } - } -} - -// MARK: DepthDataOutput Delegate Methods - -@available(iOS 11.0, *) -extension LuminaCamera: AVCaptureDepthDataOutputDelegate { - func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) { - DispatchQueue.main.async { - self.delegate?.depthDataCaptured(camera: self, depthData: depthData) - } - } - - func depthDataOutput(_ output: AVCaptureDepthDataOutput, didDrop depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection, reason: AVCaptureOutput.DataDroppedReason) { - // place to handle dropped AVDepthData if we need it - } -} - -// MARK: RecordingOutput Delegate Methods - -extension LuminaCamera: 
AVCaptureFileOutputRecordingDelegate { - func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) { - DispatchQueue.main.async { - if error == nil, let delegate = self.delegate { - delegate.videoRecordingCaptured(camera: self, videoURL: outputFileURL) - } - } - } - - func photoOutput(_ output: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) { - if self.captureLivePhotos { - self.delegate?.cameraBeganTakingLivePhoto(camera: self) - } - } - - func photoOutput(_ output: AVCapturePhotoOutput, didFinishRecordingLivePhotoMovieForEventualFileAt outputFileURL: URL, resolvedSettings: AVCaptureResolvedPhotoSettings) { - if self.captureLivePhotos { - self.delegate?.cameraFinishedTakingLivePhoto(camera: self) - } - } - - func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingLivePhotoToMovieFileAt outputFileURL: URL, duration: CMTime, photoDisplayTime: CMTime, resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) 
{ - photoCollectionQueue.sync { - if self.currentPhotoCollection == nil { - var collection = LuminaPhotoCapture() - collection.camera = self - collection.livePhotoURL = outputFileURL - self.currentPhotoCollection = collection - } else { - guard var collection = self.currentPhotoCollection else { - return - } - collection.camera = self - collection.livePhotoURL = outputFileURL - self.currentPhotoCollection = collection - } - } - } -} - -extension Formatter { - static let iso8601: DateFormatter = { - let formatter = DateFormatter() - formatter.calendar = Calendar(identifier: .iso8601) - formatter.locale = Locale(identifier: "en_US_POSIX") - formatter.timeZone = TimeZone(secondsFromGMT: 0) - formatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX" - return formatter - }() -} -extension Date { - var iso8601: String { - return Formatter.iso8601.string(from: self) - } -} diff --git a/Lumina/Lumina/LuminaViewController.swift b/Lumina/Lumina/LuminaViewController.swift deleted file mode 100644 index 048fc19..0000000 --- a/Lumina/Lumina/LuminaViewController.swift +++ /dev/null @@ -1,833 +0,0 @@ -// -// CameraViewController.swift -// CameraFramework -// -// Created by David Okun on 8/29/17. -// Copyright © 2017 David Okun. All rights reserved. 
-// - -import UIKit -import AVFoundation -import CoreML - -/// Delegate for returning information to the application utilizing Lumina -public protocol LuminaDelegate: class { - - /// Triggered whenever a still image is captured by the user of Lumina - /// - /// - Parameters: - /// - stillImage: the image captured by Lumina - /// - livePhotoAt: the URL where the live photo file can be located and used, if enabled and available - /// - depthData: the depth data associated with the captured still image, if enabled and available (iOS 11.0 only) - /// - controller: the instance of Lumina that captured the still image - func captured(stillImage: UIImage, livePhotoAt: URL?, depthData: Any?, from controller: LuminaViewController) - - /// Triggered whenever a video is captured by the user of Lumina - /// - /// - Parameters: - /// - videoAt: the URL where the video file can be located and used - /// - controller: the instance of Lumina that captured the still image - func captured(videoAt: URL, from controller: LuminaViewController) - - /// Triggered whenever streamFrames is set to true on Lumina, and streams video frames as UIImage instances - /// - /// - Note: Will not be triggered unless streamFrames is true. False is default value - /// - Parameters: - /// - videoFrame: the frame captured by Lumina - /// - controller: the instance of Lumina that is streaming the frames - func streamed(videoFrame: UIImage, from controller: LuminaViewController) - - /// Triggered whenever a CoreML model is given to Lumina, and Lumina streams a video frame alongside a prediction - /// - /// - Note: Will not be triggered unless streamingModel resolves to not nil. 
Leaving the streamingModel parameter unset will not trigger this method - /// - Warning: The other method for passing video frames back via a delegate will not be triggered in the presence of a CoreML model - /// - Parameters: - /// - videoFrame: the frame captured by Lumina - /// - predictions: the predictions made by the model used with Lumina - /// - controller: the instance of Lumina that is streaming the frames - func streamed(videoFrame: UIImage, with predictions: [LuminaPrediction]?, from controller: LuminaViewController) - - /// Triggered whenever streamDepthData is set to true on Lumina, and streams depth data detected in the form of AVDepthData - /// - /// - Warning: This data is returned from type `Any`, and must be optionally downcast to `AVDepthData` by the user of Lumina. This is to maintain backwards compatibility with iOS 10.0 - /// - Note: This is only available on iOS 11.0 - /// - Parameters: - /// - depthData: buffer containing AVDepthData relevant to the streamed video frame - /// - controller: the instance of Lumina that is streaming the depth data - func streamed(depthData: Any, from controller: LuminaViewController) - - /// Triggered whenever trackMetadata is set to true on Lumina, and streams metadata detected in the form of QR codes, bar codes, or faces - /// - /// - Note: For list of all machine readable object types, aside from QR codes or faces, click [here](https://developer.apple.com/documentation/avfoundation/avmetadatamachinereadablecodeobject/machine_readable_object_types). - /// - /// - Warning: Objects returned in array must be casted to AVMetadataObject or AVMetadataFaceObject individually. - /// - /// - Parameters: - /// - metadata: the array of metadata that is captured. 
- /// - controller: the instance of Lumina that is streaming the metadata - func detected(metadata: [Any], from controller: LuminaViewController) - - /// Triggered whenever the cancel button is tapped on Lumina, with the intent of dismissing the UIViewController - /// - /// - Note: This is most usually used whenever - /// - /// - Parameter controller: the instance of Lumina that cancel was tapped on - func dismissed(controller: LuminaViewController) -} - -// MARK: Extension to make delegate functions optional - -public extension LuminaDelegate { - func captured(stillImage: UIImage, livePhotoAt: URL?, depthData: Any?, from controller: LuminaViewController) {} - func captured(videoAt: URL, from controller: LuminaViewController) {} - func streamed(videoFrame: UIImage, from controller: LuminaViewController) {} - func streamed(videoFrame: UIImage, with predictions: [LuminaPrediction]?, from controller: LuminaViewController) {} - func streamed(depthData: Any, from controller: LuminaViewController) {} - func detected(metadata: [Any], from controller: LuminaViewController) {} - func dismissed(controller: LuminaViewController) {} -} - -/// The position of the camera that is active on Lumina -public enum CameraPosition { - /// the front facing camera of the iOS device - case front - /// the back (and usually main) facing camera of the iOS device - case back -} - -/// The resolution to set the camera to at any time - refer to AVCaptureSession.Preset definitions for matching, closest as of iOS 11 -public enum CameraResolution: String { - case low352x288 = "Low 352x288" - case vga640x480 = "VGA 640x480" - case medium1280x720 = "Medium 1280x720" - case high1920x1080 = "HD 1920x1080" - case ultra3840x2160 = "4K 3840x2160" - case iframe1280x720 = "iFrame 1280x720" - case iframe960x540 = "iFrame 960x540" - case photo = "Photo" - case lowest = "Lowest" - case medium = "Medium" - case highest = "Highest" - case inputPriority = "Input Priority" - - public static func all() -> 
[CameraResolution] { - return [CameraResolution.low352x288, CameraResolution.vga640x480, CameraResolution.medium1280x720, CameraResolution.high1920x1080, CameraResolution.ultra3840x2160, CameraResolution.iframe1280x720, CameraResolution.iframe960x540, CameraResolution.photo, CameraResolution.lowest, CameraResolution.medium, CameraResolution.highest, CameraResolution.inputPriority] - } - - func foundationPreset() -> AVCaptureSession.Preset { - switch self { - case .vga640x480: - return AVCaptureSession.Preset.vga640x480 - case .low352x288: - return AVCaptureSession.Preset.cif352x288 - case .medium1280x720: - return AVCaptureSession.Preset.hd1280x720 - case .high1920x1080: - return AVCaptureSession.Preset.hd1920x1080 - case .ultra3840x2160: - return AVCaptureSession.Preset.hd4K3840x2160 - case .iframe1280x720: - return AVCaptureSession.Preset.iFrame1280x720 - case .iframe960x540: - return AVCaptureSession.Preset.iFrame960x540 - case .photo: - return AVCaptureSession.Preset.photo - case .lowest: - return AVCaptureSession.Preset.low - case .medium: - return AVCaptureSession.Preset.medium - case .highest: - return AVCaptureSession.Preset.high - case .inputPriority: - return AVCaptureSession.Preset.inputPriority - } - } - - func getDimensions() -> CMVideoDimensions { - switch self { - case .vga640x480: - return CMVideoDimensions(width: 640, height: 480) - case .low352x288: - return CMVideoDimensions(width: 352, height: 288) - case .medium1280x720, .iframe1280x720, .medium: - return CMVideoDimensions(width: 1280, height: 720) - case .high1920x1080, .highest: - return CMVideoDimensions(width: 1920, height: 1080) - case .ultra3840x2160: - return CMVideoDimensions(width: 3840, height: 2160) - case .iframe960x540: - return CMVideoDimensions(width: 960, height: 540) - case .photo: - return CMVideoDimensions(width: INT32_MAX, height: INT32_MAX) - case .lowest: - return CMVideoDimensions(width: 352, height: 288) - case .inputPriority: - return CMVideoDimensions(width: INT32_MAX, 
height: INT32_MAX) - } - } -} - -/// The main class that developers should interact with and instantiate when using Lumina -public final class LuminaViewController: UIViewController { - var camera: LuminaCamera? - - private var _previewLayer: AVCaptureVideoPreviewLayer? - var previewLayer: AVCaptureVideoPreviewLayer { - if let currentLayer = _previewLayer { - return currentLayer - } - guard let camera = self.camera, let layer = camera.getPreviewLayer() else { - return AVCaptureVideoPreviewLayer() - } - layer.frame = self.view.bounds - _previewLayer = layer - return layer - } - - private var _zoomRecognizer: UIPinchGestureRecognizer? - var zoomRecognizer: UIPinchGestureRecognizer { - if let currentRecognizer = _zoomRecognizer { - return currentRecognizer - } - let recognizer = UIPinchGestureRecognizer(target: self, action: #selector(handlePinchGestureRecognizer(recognizer:))) - recognizer.delegate = self - _zoomRecognizer = recognizer - return recognizer - } - - private var _focusRecognizer: UITapGestureRecognizer? - var focusRecognizer: UITapGestureRecognizer { - if let currentRecognizer = _focusRecognizer { - return currentRecognizer - } - let recognizer = UITapGestureRecognizer(target: self, action: #selector(handleTapGestureRecognizer(recognizer:))) - recognizer.delegate = self - _focusRecognizer = recognizer - return recognizer - } - - private var _feedbackGenerator: LuminaHapticFeedbackGenerator? - var feedbackGenerator: LuminaHapticFeedbackGenerator { - if let currentGenerator = _feedbackGenerator { - return currentGenerator - } - let generator = LuminaHapticFeedbackGenerator() - _feedbackGenerator = generator - return generator - } - - private var _cancelButton: LuminaButton? 
- var cancelButton: LuminaButton { - if let currentButton = _cancelButton { - return currentButton - } - let button = LuminaButton(with: SystemButtonType.cancel) - button.addTarget(self, action: #selector(cancelButtonTapped), for: .touchUpInside) - _cancelButton = button - return button - } - - private var _shutterButton: LuminaButton? - var shutterButton: LuminaButton { - if let currentButton = _shutterButton { - return currentButton - } - let button = LuminaButton(with: SystemButtonType.shutter) - button.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(shutterButtonTapped))) - button.addGestureRecognizer(UILongPressGestureRecognizer(target: self, action: #selector(shutterButtonLongPressed))) - _shutterButton = button - return button - } - - private var _switchButton: LuminaButton? - var switchButton: LuminaButton { - if let currentButton = _switchButton { - return currentButton - } - let button = LuminaButton(with: SystemButtonType.cameraSwitch) - button.addTarget(self, action: #selector(switchButtonTapped), for: .touchUpInside) - _switchButton = button - return button - } - - private var _torchButton: LuminaButton? - var torchButton: LuminaButton { - if let currentButton = _torchButton { - return currentButton - } - let button = LuminaButton(with: SystemButtonType.torch) - button.addTarget(self, action: #selector(torchButtonTapped), for: .touchUpInside) - _torchButton = button - return button - } - - private var _textPromptView: LuminaTextPromptView? - var textPromptView: LuminaTextPromptView { - if let existingView = _textPromptView { - return existingView - } - let promptView = LuminaTextPromptView() - _textPromptView = promptView - return promptView - } - - fileprivate var isUpdating = false - - /// The delegate for streaming output from Lumina - weak open var delegate: LuminaDelegate? 
- - /// The position of the camera - /// - /// - Note: Responds live to being set at any time, and will update automatically - open var position: CameraPosition = .back { - didSet { - guard let camera = self.camera else { - return - } - camera.position = position - } - } - - /// Set this to choose whether or not Lumina will be able to record video by holding down the capture button - /// - /// - Note: Responds live to being set at any time, and will update automatically - /// - /// - Warning: This setting takes precedence over video data streaming - if this is turned on, frames cannot be streamed, nor can CoreML be used via Lumina's recognizer mechanism. - open var recordsVideo = false { - didSet { - if let camera = self.camera { - camera.recordsVideo = recordsVideo - } - } - } - - /// Set this to choose whether or not Lumina will stream video frames through the delegate - /// - /// - Note: Responds live to being set at any time, and will update automatically - /// - /// - Warning: Will not do anything if delegate is not implemented - open var streamFrames = false { - didSet { - if let camera = self.camera { - camera.streamFrames = streamFrames - } - } - } - - /// Set this to choose whether or not Lumina will stream machine readable metadata through the delegate - /// - /// - Note: Responds live to being set at any time, and will update automatically - /// - /// - Warning: Will not do anything if delegate is not implemented - open var trackMetadata = false { - didSet { - if let camera = self.camera { - camera.trackMetadata = trackMetadata - } - } - } - - /// Lumina comes ready with a view for a text prompt to give instructions to the user, and this is where you can set the text of that prompt - /// - /// - Note: Responds live to being set at any time, and will update automatically - /// - /// - Warning: If left empty, or unset, no view will be present, but view will be created if changed - open var textPrompt = "" { - didSet { - self.textPromptView.updateText(to: 
textPrompt) - } - } - - /// Set this to choose a resolution for the camera at any time - defaults to highest resolution possible for camera - /// - /// - Note: Responds live to being set at any time, and will update automatically - open var resolution: CameraResolution = .highest { - didSet { - if let camera = self.camera { - camera.resolution = resolution - } - } - } - - /// Set this to choose a frame rate for the camera at any time - defaults to 30 if query is not available - /// - /// - Note: Responds live to being set at any time, and will update automatically - open var frameRate: Int = 30 { - didSet { - if let camera = self.camera { - camera.frameRate = frameRate - } - } - } - - private var _streamingModel: AnyObject? - - /// A model that will be used when streaming images for object recognition - /// - /// - Note: Only works on iOS 11 and up - /// - /// - Warning: If this is set, streamFrames is over-ridden to true - @available(iOS 11.0, *) - open var streamingModel: MLModel? { - get { - return _streamingModel as? MLModel - } - set { - if newValue != nil { - _streamingModel = newValue - self.streamFrames = true - if let camera = self.camera { - camera.streamingModel = newValue - } - } - } - } - - /// The maximum amount of zoom that Lumina can use - /// - /// - Note: Default value will rely on whatever the active device can handle, if this is not explicitly set - open var maxZoomScale: Float = MAXFLOAT { - didSet { - if let camera = camera { - camera.maxZoomScale = maxZoomScale - } - } - } - - /// Set this to decide whether live photos will be captured whenever a still image is captured. - /// - /// - Note: Overrides cameraResolution to .photo - /// - /// - Warning: If video recording is enabled, live photos will not work. 
- open var captureLivePhotos: Bool = false { - didSet { - if let camera = camera { - camera.captureLivePhotos = captureLivePhotos - } - } - } - - /// Set this to return AVDepthData with a still captured image - /// - /// - Note: Only works on iOS 11.0 or higher - /// - Note: Only works with .photo, .medium1280x720, and .vga640x480 resolutions - open var captureDepthData: Bool = false { - didSet { - if let camera = camera { - camera.captureDepthData = captureDepthData - } - } - } - - /// Set this to return AVDepthData with streamed video frames - /// - /// - Note: Only works on iOS 11.0 or higher - /// - Note: Only works with .photo, .medium1280x720, and .vga640x480 resolutions - open var streamDepthData: Bool = false { - didSet { - if let camera = camera { - camera.streamDepthData = streamDepthData - } - } - } - -// open var capturesHighResolutionImages: Bool = false { -// didSet { -// if let camera = camera { -// camera.capturesHighResolutionImages = capturesHighResolutionImages -// } -// } -// } - - fileprivate var currentZoomScale: Float = 1.0 { - didSet { - if let camera = self.camera { - camera.currentZoomScale = currentZoomScale - } - } - } - - fileprivate var beginZoomScale: Float = 1.0 - - /// run this in order to create Lumina - public init() { - super.init(nibName: nil, bundle: nil) - let camera = LuminaCamera() - camera.delegate = self - self.camera = camera - } - - /// run this in order to create Lumina with a storyboard - public required init?(coder aDecoder: NSCoder) { - super.init(coder: aDecoder) - let camera = LuminaCamera() - camera.delegate = self - self.camera = camera - } - - /// override with caution - public override func didReceiveMemoryWarning() { - super.didReceiveMemoryWarning() - print("Camera framework is overloading on memory") - } - - /// override with caution - public override func viewWillAppear(_ animated: Bool) { - super.viewWillAppear(animated) - createUI() - if let camera = self.camera { - camera.updateVideo({ result in - 
self.handleCameraSetupResult(result) - }) - camera.updateAudio({ result in - self.handleCameraSetupResult(result) - }) - } - } - - /// override with caution - public override func viewDidAppear(_ animated: Bool) { - super.viewDidAppear(animated) - feedbackGenerator.prepare() - } - - public override var shouldAutorotate: Bool { - guard let camera = self.camera else { - return true - } - return !camera.recordingVideo - } - - /// override with caution - public override func viewDidDisappear(_ animated: Bool) { - super.viewDidDisappear(true) - if let camera = self.camera { - camera.pause() - } - } - - /// override with caution - public override func viewWillLayoutSubviews() { - super.viewWillLayoutSubviews() - if self.camera?.recordingVideo == true { - return - } - updateUI(orientation: UIApplication.shared.statusBarOrientation) - updateButtonFrames() - } - - /// override with caution - override public var prefersStatusBarHidden: Bool { - return true - } - - /// returns a string of the version of Lumina currently in use, follows semantic versioning. - open class func getVersion() -> String? { - let bundle = Bundle(for: LuminaViewController.self) - guard let infoDictionary = bundle.infoDictionary else { - return nil - } - guard let versionString = infoDictionary["CFBundleShortVersionString"] as? 
String else { - return nil - } - return versionString - } -} - -// MARK: User Interface Creation - -fileprivate extension LuminaViewController { - @objc func handlePinchGestureRecognizer(recognizer: UIPinchGestureRecognizer) { - guard self.position == .back else { - return - } - currentZoomScale = min(maxZoomScale, max(1.0, beginZoomScale * Float(recognizer.scale))) - } - - @objc func handleTapGestureRecognizer(recognizer: UITapGestureRecognizer) { - if self.position == .back { - focusCamera(at: recognizer.location(in: self.view)) - } - } - - func createUI() { - self.view.layer.addSublayer(self.previewLayer) - self.view.addSubview(self.cancelButton) - self.view.addSubview(self.shutterButton) - self.view.addSubview(self.switchButton) - self.view.addSubview(self.torchButton) - self.view.addSubview(self.textPromptView) - self.view.addGestureRecognizer(self.zoomRecognizer) - self.view.addGestureRecognizer(self.focusRecognizer) - enableUI(valid: false) - } - - func enableUI(valid: Bool) { - DispatchQueue.main.async { - self.shutterButton.isEnabled = valid - self.switchButton.isEnabled = valid - self.torchButton.isEnabled = valid - } - } - - func updateUI(orientation: UIInterfaceOrientation) { - guard let connection = self.previewLayer.connection, connection.isVideoOrientationSupported else { - return - } - self.previewLayer.frame = self.view.bounds - connection.videoOrientation = necessaryVideoOrientation(for: orientation) - if let camera = self.camera { - camera.updateOutputVideoOrientation(connection.videoOrientation) - } - } - - func updateButtonFrames() { - self.cancelButton.center = CGPoint(x: self.view.frame.minX + 55, y: self.view.frame.maxY - 45) - if self.view.frame.width > self.view.frame.height { - self.shutterButton.center = CGPoint(x: self.view.frame.maxX - 45, y: self.view.frame.midY) - } else { - self.shutterButton.center = CGPoint(x: self.view.frame.midX, y: self.view.frame.maxY - 45) - } - self.switchButton.center = CGPoint(x: self.view.frame.maxX - 25, 
y: self.view.frame.minY + 25) - self.torchButton.center = CGPoint(x: self.view.frame.minX + 25, y: self.view.frame.minY + 25) - self.textPromptView.center = CGPoint(x: self.view.frame.midX, y: self.view.frame.minY + 45) - self.textPromptView.layoutSubviews() - } - - private func handleCameraSetupResult(_ result: CameraSetupResult) { - DispatchQueue.main.async { - switch result { - case .videoSuccess: - guard let camera = self.camera else { - return - } - self.enableUI(valid: true) - camera.start() - case .audioSuccess: - break - case .requiresUpdate: - guard let camera = self.camera else { - return - } - camera.updateVideo({ result in - self.handleCameraSetupResult(result) - }) - case .videoPermissionDenied: - self.textPrompt = "Camera permissions for Lumina have been previously denied - please access your privacy settings to change this." - case .videoPermissionRestricted: - self.textPrompt = "Camera permissions for Lumina have been restricted - please access your privacy settings to change this." - case .videoRequiresAuthorization: - guard let camera = self.camera else { - break - } - camera.requestVideoPermissions() - case .audioPermissionRestricted: - self.textPrompt = "Audio permissions for Lumina have been restricted - please access your privacy settings to change this." - DispatchQueue.main.asyncAfter(deadline: .now() + 2) { - self.textPrompt = "" - } - case .audioRequiresAuthorization: - guard let camera = self.camera else { - break - } - camera.requestAudioPermissions() - case .audioPermissionDenied: - self.textPrompt = "Audio permissions for Lumina have been previously denied - please access your privacy settings to change this." 
- DispatchQueue.main.asyncAfter(deadline: .now() + 2) { - self.textPrompt = "" - } - case .invalidVideoDataOutput, - .invalidVideoInput, - .invalidPhotoOutput, - .invalidVideoMetadataOutput, - .invalidVideoFileOutput, - .invalidAudioInput, - .invalidDepthDataOutput: - self.textPrompt = "\(result.rawValue) - please try again" - case .unknownError: - self.textPrompt = "Unknown error occurred while loading Lumina - please try again" - } - } - } - - private func necessaryVideoOrientation(for statusBarOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation { - switch statusBarOrientation { - case .portrait: - return AVCaptureVideoOrientation.portrait - case .landscapeLeft: - return AVCaptureVideoOrientation.landscapeLeft - case .landscapeRight: - return AVCaptureVideoOrientation.landscapeRight - case .portraitUpsideDown: - return AVCaptureVideoOrientation.portraitUpsideDown - default: - return AVCaptureVideoOrientation.portrait - } - } -} - -// MARK: CameraDelegate Functions - -extension LuminaViewController: LuminaCameraDelegate { - func videoRecordingCaptured(camera: LuminaCamera, videoURL: URL) { - delegate?.captured(videoAt: videoURL, from: self) - } - - func videoFrameCaptured(camera: LuminaCamera, frame: UIImage, predictedObjects: [LuminaPrediction]?) { - delegate?.streamed(videoFrame: frame, with: predictedObjects, from: self) - } - - func finishedFocus(camera: LuminaCamera) { - DispatchQueue.main.async { - self.isUpdating = false - } - } - - func stillImageCaptured(camera: LuminaCamera, image: UIImage, livePhotoURL: URL?, depthData: Any?) 
{ - camera.currentPhotoCollection = nil - delegate?.captured(stillImage: image, livePhotoAt: livePhotoURL, depthData: depthData, from: self) - } - - func videoFrameCaptured(camera: LuminaCamera, frame: UIImage) { - delegate?.streamed(videoFrame: frame, from: self) - } - - func detected(camera: LuminaCamera, metadata: [Any]) { - delegate?.detected(metadata: metadata, from: self) - } - - func cameraSetupCompleted(camera: LuminaCamera, result: CameraSetupResult) { - handleCameraSetupResult(result) - } - - func cameraBeganTakingLivePhoto(camera: LuminaCamera) { - DispatchQueue.main.async { - self.textPrompt = "Capturing live photo..." - } - } - - func cameraFinishedTakingLivePhoto(camera: LuminaCamera) { - DispatchQueue.main.async { - self.textPrompt = "" - } - } - - func depthDataCaptured(camera: LuminaCamera, depthData: Any) { - delegate?.streamed(depthData: depthData, from: self) - } -} - -// MARK: UIButton Functions - -fileprivate extension LuminaViewController { - @objc func cancelButtonTapped() { - delegate?.dismissed(controller: self) - } - - @objc func shutterButtonTapped() { - shutterButton.takePhoto() - previewLayer.opacity = 0 - UIView.animate(withDuration: 0.25) { - self.previewLayer.opacity = 1 - } - guard let camera = self.camera else { - return - } - camera.captureStillImage() - } - - @objc func shutterButtonLongPressed(_ sender: UILongPressGestureRecognizer) { - guard let camera = self.camera else { - return - } - switch sender.state { - case .began: - if recordsVideo && !camera.recordingVideo { - shutterButton.startRecordingVideo() - camera.startVideoRecording() - feedbackGenerator.startRecordingVideoFeedback() - } - case .ended: - if recordsVideo && camera.recordingVideo { - shutterButton.stopRecordingVideo() - camera.stopVideoRecording() - feedbackGenerator.endRecordingVideoFeedback() - } else { - feedbackGenerator.errorFeedback() - } - default: - break - } - } - - @objc func switchButtonTapped() { - switch self.position { - case .back: - 
self.position = .front - default: - self.position = .back - } - } - - @objc func torchButtonTapped() { - guard let camera = self.camera else { - return - } - camera.torchState = !camera.torchState - } -} - -// MARK: GestureRecognizer Delegate Methods - -extension LuminaViewController: UIGestureRecognizerDelegate { - public func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool { - if gestureRecognizer.isKind(of: UIPinchGestureRecognizer.self) { - beginZoomScale = currentZoomScale - } - return true - } -} - -// MARK: Tap to Focus Methods - -extension LuminaViewController { - func focusCamera(at point: CGPoint) { - if self.isUpdating == true { - return - } else { - self.isUpdating = true - } - let focusX = point.x/UIScreen.main.bounds.size.width - let focusY = point.y/UIScreen.main.bounds.size.height - guard let camera = self.camera else { - return - } - camera.handleFocus(at: CGPoint(x: focusX, y: focusY)) - showFocusView(at: point) - let deadlineTime = DispatchTime.now() + .seconds(1) - DispatchQueue.main.asyncAfter(deadline: deadlineTime) { - camera.resetCameraToContinuousExposureAndFocus() - } - } - - private func showFocusView(at point: CGPoint) { - let focusView: UIImageView = UIImageView(image: UIImage(named: "cameraFocus", in: Bundle(for: LuminaViewController.self), compatibleWith: nil)) - focusView.contentMode = .scaleAspectFit - focusView.frame = CGRect(x: 0, y: 0, width: 50, height: 50) - focusView.transform = CGAffineTransform(scaleX: 1.7, y: 1.7) - focusView.center = point - focusView.alpha = 0.0 - self.view.addSubview(focusView) - UIView.animate(withDuration: 0.3, animations: { - focusView.alpha = 1.0 - focusView.transform = CGAffineTransform(scaleX: 1.0, y: 1.0) - }, completion: { _ in - UIView.animate(withDuration: 1.0, animations: { - focusView.alpha = 0.0 - }, completion: { _ in - focusView.removeFromSuperview() - self.isUpdating = false - }) - }) - } -} diff --git 
a/Lumina/Lumina/UI/Extensions/Delegates/GestureRecognizerDelegateExtension.swift b/Lumina/Lumina/UI/Extensions/Delegates/GestureRecognizerDelegateExtension.swift new file mode 100644 index 0000000..ef89ecf --- /dev/null +++ b/Lumina/Lumina/UI/Extensions/Delegates/GestureRecognizerDelegateExtension.swift @@ -0,0 +1,18 @@ +// +// GestureRecognizerDelegateExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation + +extension LuminaViewController: UIGestureRecognizerDelegate { + public func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool { + if gestureRecognizer.isKind(of: UIPinchGestureRecognizer.self) { + beginZoomScale = currentZoomScale + } + return true + } +} diff --git a/Lumina/Lumina/UI/Extensions/Delegates/LuminaCameraDelegateExtension.swift b/Lumina/Lumina/UI/Extensions/Delegates/LuminaCameraDelegateExtension.swift new file mode 100644 index 0000000..f1497be --- /dev/null +++ b/Lumina/Lumina/UI/Extensions/Delegates/LuminaCameraDelegateExtension.swift @@ -0,0 +1,58 @@ +// +// LuminaCameraDelegateExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import Foundation + +extension LuminaViewController: LuminaCameraDelegate { + func videoRecordingCaptured(camera: LuminaCamera, videoURL: URL) { + delegate?.captured(videoAt: videoURL, from: self) + } + + func videoFrameCaptured(camera: LuminaCamera, frame: UIImage, predictedObjects: [LuminaPrediction]?) { + delegate?.streamed(videoFrame: frame, with: predictedObjects, from: self) + } + + func finishedFocus(camera: LuminaCamera) { + DispatchQueue.main.async { + self.isUpdating = false + } + } + + func stillImageCaptured(camera: LuminaCamera, image: UIImage, livePhotoURL: URL?, depthData: Any?) 
{ + camera.currentPhotoCollection = nil + delegate?.captured(stillImage: image, livePhotoAt: livePhotoURL, depthData: depthData, from: self) + } + + func videoFrameCaptured(camera: LuminaCamera, frame: UIImage) { + delegate?.streamed(videoFrame: frame, from: self) + } + + func detected(camera: LuminaCamera, metadata: [Any]) { + delegate?.detected(metadata: metadata, from: self) + } + + func cameraSetupCompleted(camera: LuminaCamera, result: CameraSetupResult) { + handleCameraSetupResult(result) + } + + func cameraBeganTakingLivePhoto(camera: LuminaCamera) { + DispatchQueue.main.async { + self.textPrompt = "Capturing live photo..." + } + } + + func cameraFinishedTakingLivePhoto(camera: LuminaCamera) { + DispatchQueue.main.async { + self.textPrompt = "" + } + } + + func depthDataCaptured(camera: LuminaCamera, depthData: Any) { + delegate?.streamed(depthData: depthData, from: self) + } +} diff --git a/Lumina/Lumina/UI/Extensions/Delegates/LuminaDelegate.swift b/Lumina/Lumina/UI/Extensions/Delegates/LuminaDelegate.swift new file mode 100644 index 0000000..32b60b0 --- /dev/null +++ b/Lumina/Lumina/UI/Extensions/Delegates/LuminaDelegate.swift @@ -0,0 +1,86 @@ +// +// LuminaDelegate.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation + +/// Delegate for returning information to the application utilizing Lumina +public protocol LuminaDelegate: class { + + /// Triggered whenever a still image is captured by the user of Lumina + /// + /// - Parameters: + /// - stillImage: the image captured by Lumina + /// - livePhotoAt: the URL where the live photo file can be located and used, if enabled and available + /// - depthData: the depth data associated with the captured still image, if enabled and available (iOS 11.0 only) + /// - controller: the instance of Lumina that captured the still image + func captured(stillImage: UIImage, livePhotoAt: URL?, depthData: Any?, from controller: LuminaViewController) + + /// Triggered whenever a video is captured by the user of Lumina + /// + /// - Parameters: + /// - videoAt: the URL where the video file can be located and used + /// - controller: the instance of Lumina that captured the still image + func captured(videoAt: URL, from controller: LuminaViewController) + + /// Triggered whenever streamFrames is set to true on Lumina, and streams video frames as UIImage instances + /// + /// - Note: Will not be triggered unless streamFrames is true. False is default value + /// - Parameters: + /// - videoFrame: the frame captured by Lumina + /// - controller: the instance of Lumina that is streaming the frames + func streamed(videoFrame: UIImage, from controller: LuminaViewController) + + /// Triggered whenever a CoreML model is given to Lumina, and Lumina streams a video frame alongside a prediction + /// + /// - Note: Will not be triggered unless streamingModel resolves to not nil. 
Leaving the streamingModel parameter unset will not trigger this method + /// - Warning: The other method for passing video frames back via a delegate will not be triggered in the presence of a CoreML model + /// - Parameters: + /// - videoFrame: the frame captured by Lumina + /// - predictions: the predictions made by the model used with Lumina + /// - controller: the instance of Lumina that is streaming the frames + func streamed(videoFrame: UIImage, with predictions: [LuminaPrediction]?, from controller: LuminaViewController) + + /// Triggered whenever streamDepthData is set to true on Lumina, and streams depth data detected in the form of AVDepthData + /// + /// - Warning: This data is returned from type `Any`, and must be optionally downcast to `AVDepthData` by the user of Lumina. This is to maintain backwards compatibility with iOS 10.0 + /// - Note: This is only available on iOS 11.0 + /// - Parameters: + /// - depthData: buffer containing AVDepthData relevant to the streamed video frame + /// - controller: the instance of Lumina that is streaming the depth data + func streamed(depthData: Any, from controller: LuminaViewController) + + /// Triggered whenever trackMetadata is set to true on Lumina, and streams metadata detected in the form of QR codes, bar codes, or faces + /// + /// - Note: For list of all machine readable object types, aside from QR codes or faces, click [here](https://developer.apple.com/documentation/avfoundation/avmetadatamachinereadablecodeobject/machine_readable_object_types). + /// + /// - Warning: Objects returned in array must be casted to AVMetadataObject or AVMetadataFaceObject individually. + /// + /// - Parameters: + /// - metadata: the array of metadata that is captured. 
+ /// - controller: the instance of Lumina that is streaming the metadata + func detected(metadata: [Any], from controller: LuminaViewController) + + /// Triggered whenever the cancel button is tapped on Lumina, with the intent of dismissing the UIViewController + /// + /// - Note: This is most usually used whenever + /// + /// - Parameter controller: the instance of Lumina that cancel was tapped on + func dismissed(controller: LuminaViewController) +} + +// MARK: Extension to make delegate functions optional + +public extension LuminaDelegate { + func captured(stillImage: UIImage, livePhotoAt: URL?, depthData: Any?, from controller: LuminaViewController) {} + func captured(videoAt: URL, from controller: LuminaViewController) {} + func streamed(videoFrame: UIImage, from controller: LuminaViewController) {} + func streamed(videoFrame: UIImage, with predictions: [LuminaPrediction]?, from controller: LuminaViewController) {} + func streamed(depthData: Any, from controller: LuminaViewController) {} + func detected(metadata: [Any], from controller: LuminaViewController) {} + func dismissed(controller: LuminaViewController) {} +} diff --git a/Lumina/Lumina/UI/Extensions/InterfaceHandlerExtension.swift b/Lumina/Lumina/UI/Extensions/InterfaceHandlerExtension.swift new file mode 100644 index 0000000..cab4260 --- /dev/null +++ b/Lumina/Lumina/UI/Extensions/InterfaceHandlerExtension.swift @@ -0,0 +1,129 @@ +// +// InterfaceHandlerExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation +import AVFoundation + +extension LuminaViewController { + @objc func handlePinchGestureRecognizer(recognizer: UIPinchGestureRecognizer) { + guard self.position == .back else { + return + } + currentZoomScale = min(maxZoomScale, max(1.0, beginZoomScale * Float(recognizer.scale))) + } + + @objc func handleTapGestureRecognizer(recognizer: UITapGestureRecognizer) { + if self.position == .back { + focusCamera(at: recognizer.location(in: self.view)) + } + } + + func createUI() { + self.view.layer.addSublayer(self.previewLayer) + self.view.addSubview(self.cancelButton) + self.view.addSubview(self.shutterButton) + self.view.addSubview(self.switchButton) + self.view.addSubview(self.torchButton) + self.view.addSubview(self.textPromptView) + self.view.addGestureRecognizer(self.zoomRecognizer) + self.view.addGestureRecognizer(self.focusRecognizer) + enableUI(valid: false) + } + + func enableUI(valid: Bool) { + DispatchQueue.main.async { + self.shutterButton.isEnabled = valid + self.switchButton.isEnabled = valid + self.torchButton.isEnabled = valid + } + } + + func updateUI(orientation: UIInterfaceOrientation) { + guard let connection = self.previewLayer.connection, connection.isVideoOrientationSupported else { + return + } + self.previewLayer.frame = self.view.bounds + connection.videoOrientation = necessaryVideoOrientation(for: orientation) + self.camera?.updateOutputVideoOrientation(connection.videoOrientation) + } + + func updateButtonFrames() { + self.cancelButton.center = CGPoint(x: self.view.frame.minX + 55, y: self.view.frame.maxY - 45) + if self.view.frame.width > self.view.frame.height { + self.shutterButton.center = CGPoint(x: self.view.frame.maxX - 45, y: self.view.frame.midY) + } else { + self.shutterButton.center = CGPoint(x: self.view.frame.midX, y: self.view.frame.maxY - 45) + } + self.switchButton.center = CGPoint(x: self.view.frame.maxX - 25, y: self.view.frame.minY + 25) + self.torchButton.center = CGPoint(x: self.view.frame.minX + 
25, y: self.view.frame.minY + 25) + self.textPromptView.center = CGPoint(x: self.view.frame.midX, y: self.view.frame.minY + 45) + self.textPromptView.layoutSubviews() + } + + // swiftlint:disable cyclomatic_complexity + func handleCameraSetupResult(_ result: CameraSetupResult) { + DispatchQueue.main.async { + switch result { + case .videoSuccess: + if let camera = self.camera { + self.enableUI(valid: true) + camera.start() + } + case .audioSuccess: + break + case .requiresUpdate: + self.camera?.updateVideo({ result in + self.handleCameraSetupResult(result) + }) + case .videoPermissionDenied: + self.textPrompt = "Camera permissions for Lumina have been previously denied - please access your privacy settings to change this." + case .videoPermissionRestricted: + self.textPrompt = "Camera permissions for Lumina have been restricted - please access your privacy settings to change this." + case .videoRequiresAuthorization: + self.camera?.requestVideoPermissions() + case .audioPermissionRestricted: + self.textPrompt = "Audio permissions for Lumina have been restricted - please access your privacy settings to change this." + DispatchQueue.main.asyncAfter(deadline: .now() + 2) { + self.textPrompt = "" + } + case .audioRequiresAuthorization: + self.camera?.requestAudioPermissions() + case .audioPermissionDenied: + self.textPrompt = "Audio permissions for Lumina have been previously denied - please access your privacy settings to change this." 
+ DispatchQueue.main.asyncAfter(deadline: .now() + 2) { + self.textPrompt = "" + } + case .invalidVideoDataOutput, + .invalidVideoInput, + .invalidPhotoOutput, + .invalidVideoMetadataOutput, + .invalidVideoFileOutput, + .invalidAudioInput, + .invalidDepthDataOutput: + self.textPrompt = "\(result.rawValue) - please try again" + case .unknownError: + self.textPrompt = "Unknown error occurred while loading Lumina - please try again" + } + } + } + + private func necessaryVideoOrientation(for statusBarOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation { + switch statusBarOrientation { + case .portrait: + return AVCaptureVideoOrientation.portrait + case .landscapeLeft: + return AVCaptureVideoOrientation.landscapeLeft + case .landscapeRight: + return AVCaptureVideoOrientation.landscapeRight + case .portraitUpsideDown: + return AVCaptureVideoOrientation.portraitUpsideDown + default: + return AVCaptureVideoOrientation.portrait + } + } +} diff --git a/Lumina/Lumina/UI/Extensions/ViewControllerButtonFunctions.swift b/Lumina/Lumina/UI/Extensions/ViewControllerButtonFunctions.swift new file mode 100644 index 0000000..4ba6857 --- /dev/null +++ b/Lumina/Lumina/UI/Extensions/ViewControllerButtonFunctions.swift @@ -0,0 +1,67 @@ +// +// ViewControllerButtonFunctions.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation + +extension LuminaViewController { + @objc func cancelButtonTapped() { + delegate?.dismissed(controller: self) + } + + @objc func shutterButtonTapped() { + shutterButton.takePhoto() + previewLayer.opacity = 0 + UIView.animate(withDuration: 0.25) { + self.previewLayer.opacity = 1 + } + guard let camera = self.camera else { + return + } + camera.captureStillImage() + } + + @objc func shutterButtonLongPressed(_ sender: UILongPressGestureRecognizer) { + guard let camera = self.camera else { + return + } + switch sender.state { + case .began: + if recordsVideo && !camera.recordingVideo { + shutterButton.startRecordingVideo() + camera.startVideoRecording() + feedbackGenerator.startRecordingVideoFeedback() + } + case .ended: + if recordsVideo && camera.recordingVideo { + shutterButton.stopRecordingVideo() + camera.stopVideoRecording() + feedbackGenerator.endRecordingVideoFeedback() + } else { + feedbackGenerator.errorFeedback() + } + default: + break + } + } + + @objc func switchButtonTapped() { + switch self.position { + case .back: + self.position = .front + default: + self.position = .back + } + } + + @objc func torchButtonTapped() { + guard let camera = self.camera else { + return + } + camera.torchState = !camera.torchState + } +} diff --git a/Lumina/Lumina/UI/Extensions/ViewControllerFocusHandlerExtension.swift b/Lumina/Lumina/UI/Extensions/ViewControllerFocusHandlerExtension.swift new file mode 100644 index 0000000..f6f3f6b --- /dev/null +++ b/Lumina/Lumina/UI/Extensions/ViewControllerFocusHandlerExtension.swift @@ -0,0 +1,51 @@ +// +// ViewControllerFocusHandlerExtension.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation + +extension LuminaViewController { + func focusCamera(at point: CGPoint) { + if self.isUpdating == true { + return + } else { + self.isUpdating = true + } + let focusX = point.x/UIScreen.main.bounds.size.width + let focusY = point.y/UIScreen.main.bounds.size.height + guard let camera = self.camera else { + return + } + camera.handleFocus(at: CGPoint(x: focusX, y: focusY)) + showFocusView(at: point) + let deadlineTime = DispatchTime.now() + .seconds(1) + DispatchQueue.main.asyncAfter(deadline: deadlineTime) { + camera.resetCameraToContinuousExposureAndFocus() + } + } + + private func showFocusView(at point: CGPoint) { + let focusView: UIImageView = UIImageView(image: UIImage(named: "cameraFocus", in: Bundle(for: LuminaViewController.self), compatibleWith: nil)) + focusView.contentMode = .scaleAspectFit + focusView.frame = CGRect(x: 0, y: 0, width: 50, height: 50) + focusView.transform = CGAffineTransform(scaleX: 1.7, y: 1.7) + focusView.center = point + focusView.alpha = 0.0 + self.view.addSubview(focusView) + UIView.animate(withDuration: 0.3, animations: { + focusView.alpha = 1.0 + focusView.transform = CGAffineTransform(scaleX: 1.0, y: 1.0) + }, completion: { _ in + UIView.animate(withDuration: 1.0, animations: { + focusView.alpha = 0.0 + }, completion: { _ in + focusView.removeFromSuperview() + self.isUpdating = false + }) + }) + } +} diff --git a/Lumina/Lumina/LuminaButton.swift b/Lumina/Lumina/UI/LuminaButton.swift similarity index 100% rename from Lumina/Lumina/LuminaButton.swift rename to Lumina/Lumina/UI/LuminaButton.swift diff --git a/Lumina/Lumina/UI/LuminaCameraViewSettings.swift b/Lumina/Lumina/UI/LuminaCameraViewSettings.swift new file mode 100644 index 0000000..48c91b3 --- /dev/null +++ b/Lumina/Lumina/UI/LuminaCameraViewSettings.swift @@ -0,0 +1,91 @@ +// +// LuminaCameraViewSettings.swift +// Lumina +// +// Created by David Okun on 11/20/17. +// Copyright © 2017 David Okun. All rights reserved. 
+// + +import Foundation +import AVFoundation + +/// The position of the camera that is active on Lumina +public enum CameraPosition { + /// the front facing camera of the iOS device + case front + /// the back (and usually main) facing camera of the iOS device + case back +} + +/// The resolution to set the camera to at any time - refer to AVCaptureSession.Preset definitions for matching, closest as of iOS 11 +public enum CameraResolution: String { + case low352x288 = "Low 352x288" + case vga640x480 = "VGA 640x480" + case medium1280x720 = "Medium 1280x720" + case high1920x1080 = "HD 1920x1080" + case ultra3840x2160 = "4K 3840x2160" + case iframe1280x720 = "iFrame 1280x720" + case iframe960x540 = "iFrame 960x540" + case photo = "Photo" + case lowest = "Lowest" + case medium = "Medium" + case highest = "Highest" + case inputPriority = "Input Priority" + + public static func all() -> [CameraResolution] { + return [CameraResolution.low352x288, CameraResolution.vga640x480, CameraResolution.medium1280x720, CameraResolution.high1920x1080, CameraResolution.ultra3840x2160, CameraResolution.iframe1280x720, CameraResolution.iframe960x540, CameraResolution.photo, CameraResolution.lowest, CameraResolution.medium, CameraResolution.highest, CameraResolution.inputPriority] + } + + // swiftlint:disable cyclomatic_complexity + func foundationPreset() -> AVCaptureSession.Preset { + switch self { + case .vga640x480: + return AVCaptureSession.Preset.vga640x480 + case .low352x288: + return AVCaptureSession.Preset.cif352x288 + case .medium1280x720: + return AVCaptureSession.Preset.hd1280x720 + case .high1920x1080: + return AVCaptureSession.Preset.hd1920x1080 + case .ultra3840x2160: + return AVCaptureSession.Preset.hd4K3840x2160 + case .iframe1280x720: + return AVCaptureSession.Preset.iFrame1280x720 + case .iframe960x540: + return AVCaptureSession.Preset.iFrame960x540 + case .photo: + return AVCaptureSession.Preset.photo + case .lowest: + return AVCaptureSession.Preset.low + case 
.medium: + return AVCaptureSession.Preset.medium + case .highest: + return AVCaptureSession.Preset.high + case .inputPriority: + return AVCaptureSession.Preset.inputPriority + } + } + + func getDimensions() -> CMVideoDimensions { + switch self { + case .vga640x480: + return CMVideoDimensions(width: 640, height: 480) + case .low352x288: + return CMVideoDimensions(width: 352, height: 288) + case .medium1280x720, .iframe1280x720, .medium: + return CMVideoDimensions(width: 1280, height: 720) + case .high1920x1080, .highest: + return CMVideoDimensions(width: 1920, height: 1080) + case .ultra3840x2160: + return CMVideoDimensions(width: 3840, height: 2160) + case .iframe960x540: + return CMVideoDimensions(width: 960, height: 540) + case .photo: + return CMVideoDimensions(width: INT32_MAX, height: INT32_MAX) + case .lowest: + return CMVideoDimensions(width: 352, height: 288) + case .inputPriority: + return CMVideoDimensions(width: INT32_MAX, height: INT32_MAX) + } + } +} diff --git a/Lumina/Lumina/LuminaTextPromptView.swift b/Lumina/Lumina/UI/LuminaTextPromptView.swift similarity index 100% rename from Lumina/Lumina/LuminaTextPromptView.swift rename to Lumina/Lumina/UI/LuminaTextPromptView.swift diff --git a/Lumina/Lumina/UI/LuminaViewController.swift b/Lumina/Lumina/UI/LuminaViewController.swift new file mode 100644 index 0000000..d58c2a7 --- /dev/null +++ b/Lumina/Lumina/UI/LuminaViewController.swift @@ -0,0 +1,350 @@ +// +// CameraViewController.swift +// CameraFramework +// +// Created by David Okun on 8/29/17. +// Copyright © 2017 David Okun. All rights reserved. +// + +import UIKit +import AVFoundation +import CoreML + +/// The main class that developers should interact with and instantiate when using Lumina +public final class LuminaViewController: UIViewController { + var camera: LuminaCamera? + + private var _previewLayer: AVCaptureVideoPreviewLayer? 
+ var previewLayer: AVCaptureVideoPreviewLayer { + if let currentLayer = _previewLayer { + return currentLayer + } + guard let camera = self.camera, let layer = camera.getPreviewLayer() else { + return AVCaptureVideoPreviewLayer() + } + layer.frame = self.view.bounds + _previewLayer = layer + return layer + } + + private var _zoomRecognizer: UIPinchGestureRecognizer? + var zoomRecognizer: UIPinchGestureRecognizer { + if let currentRecognizer = _zoomRecognizer { + return currentRecognizer + } + let recognizer = UIPinchGestureRecognizer(target: self, action: #selector(handlePinchGestureRecognizer(recognizer:))) + recognizer.delegate = self + _zoomRecognizer = recognizer + return recognizer + } + + private var _focusRecognizer: UITapGestureRecognizer? + var focusRecognizer: UITapGestureRecognizer { + if let currentRecognizer = _focusRecognizer { + return currentRecognizer + } + let recognizer = UITapGestureRecognizer(target: self, action: #selector(handleTapGestureRecognizer(recognizer:))) + recognizer.delegate = self + _focusRecognizer = recognizer + return recognizer + } + + private var _feedbackGenerator: LuminaHapticFeedbackGenerator? + var feedbackGenerator: LuminaHapticFeedbackGenerator { + if let currentGenerator = _feedbackGenerator { + return currentGenerator + } + let generator = LuminaHapticFeedbackGenerator() + _feedbackGenerator = generator + return generator + } + + private var _cancelButton: LuminaButton? + var cancelButton: LuminaButton { + if let currentButton = _cancelButton { + return currentButton + } + let button = LuminaButton(with: SystemButtonType.cancel) + button.addTarget(self, action: #selector(cancelButtonTapped), for: .touchUpInside) + _cancelButton = button + return button + } + + private var _shutterButton: LuminaButton? 
+ var shutterButton: LuminaButton { + if let currentButton = _shutterButton { + return currentButton + } + let button = LuminaButton(with: SystemButtonType.shutter) + button.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(shutterButtonTapped))) + button.addGestureRecognizer(UILongPressGestureRecognizer(target: self, action: #selector(shutterButtonLongPressed))) + _shutterButton = button + return button + } + + private var _switchButton: LuminaButton? + var switchButton: LuminaButton { + if let currentButton = _switchButton { + return currentButton + } + let button = LuminaButton(with: SystemButtonType.cameraSwitch) + button.addTarget(self, action: #selector(switchButtonTapped), for: .touchUpInside) + _switchButton = button + return button + } + + private var _torchButton: LuminaButton? + var torchButton: LuminaButton { + if let currentButton = _torchButton { + return currentButton + } + let button = LuminaButton(with: SystemButtonType.torch) + button.addTarget(self, action: #selector(torchButtonTapped), for: .touchUpInside) + _torchButton = button + return button + } + + private var _textPromptView: LuminaTextPromptView? + var textPromptView: LuminaTextPromptView { + if let existingView = _textPromptView { + return existingView + } + let promptView = LuminaTextPromptView() + _textPromptView = promptView + return promptView + } + + var isUpdating = false + + /// The delegate for streaming output from Lumina + weak open var delegate: LuminaDelegate? 
+ + /// The position of the camera + /// + /// - Note: Responds live to being set at any time, and will update automatically + open var position: CameraPosition = .back { + didSet { + guard let camera = self.camera else { + return + } + camera.position = position + } + } + + /// Set this to choose whether or not Lumina will be able to record video by holding down the capture button + /// + /// - Note: Responds live to being set at any time, and will update automatically + /// + /// - Warning: This setting takes precedence over video data streaming - if this is turned on, frames cannot be streamed, nor can CoreML be used via Lumina's recognizer mechanism. + open var recordsVideo = false { + didSet { + self.camera?.recordsVideo = recordsVideo + } + } + + /// Set this to choose whether or not Lumina will stream video frames through the delegate + /// + /// - Note: Responds live to being set at any time, and will update automatically + /// + /// - Warning: Will not do anything if delegate is not implemented + open var streamFrames = false { + didSet { + self.camera?.streamFrames = streamFrames + } + } + + /// Set this to choose whether or not Lumina will stream machine readable metadata through the delegate + /// + /// - Note: Responds live to being set at any time, and will update automatically + /// + /// - Warning: Will not do anything if delegate is not implemented + open var trackMetadata = false { + didSet { + self.camera?.trackMetadata = trackMetadata + } + } + + /// Lumina comes ready with a view for a text prompt to give instructions to the user, and this is where you can set the text of that prompt + /// + /// - Note: Responds live to being set at any time, and will update automatically + /// + /// - Warning: If left empty, or unset, no view will be present, but view will be created if changed + open var textPrompt = "" { + didSet { + self.textPromptView.updateText(to: textPrompt) + } + } + + /// Set this to choose a resolution for the camera at any time - 
defaults to highest resolution possible for camera + /// + /// - Note: Responds live to being set at any time, and will update automatically + open var resolution: CameraResolution = .highest { + didSet { + self.camera?.resolution = resolution + } + } + + /// Set this to choose a frame rate for the camera at any time - defaults to 30 if query is not available + /// + /// - Note: Responds live to being set at any time, and will update automatically + open var frameRate: Int = 30 { + didSet { + self.camera?.frameRate = frameRate + } + } + + private var _streamingModel: AnyObject? + + /// A model that will be used when streaming images for object recognition + /// + /// - Note: Only works on iOS 11 and up + /// + /// - Warning: If this is set, streamFrames is over-ridden to true + @available(iOS 11.0, *) + open var streamingModel: MLModel? { + get { + return _streamingModel as? MLModel + } + set { + if newValue != nil { + _streamingModel = newValue + self.streamFrames = true + self.camera?.streamingModel = newValue + } + } + } + + /// The maximum amount of zoom that Lumina can use + /// + /// - Note: Default value will rely on whatever the active device can handle, if this is not explicitly set + open var maxZoomScale: Float = MAXFLOAT { + didSet { + self.camera?.maxZoomScale = maxZoomScale + } + } + + /// Set this to decide whether live photos will be captured whenever a still image is captured. + /// + /// - Note: Overrides cameraResolution to .photo + /// + /// - Warning: If video recording is enabled, live photos will not work. 
+ open var captureLivePhotos: Bool = false { + didSet { + self.camera?.captureLivePhotos = captureLivePhotos + } + } + + /// Set this to return AVDepthData with a still captured image + /// + /// - Note: Only works on iOS 11.0 or higher + /// - Note: Only works with .photo, .medium1280x720, and .vga640x480 resolutions + open var captureDepthData: Bool = false { + didSet { + self.camera?.captureDepthData = captureDepthData + } + } + + /// Set this to return AVDepthData with streamed video frames + /// + /// - Note: Only works on iOS 11.0 or higher + /// - Note: Only works with .photo, .medium1280x720, and .vga640x480 resolutions + open var streamDepthData: Bool = false { + didSet { + self.camera?.streamDepthData = streamDepthData + } + } + +// open var capturesHighResolutionImages: Bool = false { +// didSet { +// self.camera?.capturesHighResolutionImages = capturesHighResolutionImages +// } +// } + + var currentZoomScale: Float = 1.0 { + didSet { + self.camera?.currentZoomScale = currentZoomScale + } + } + + var beginZoomScale: Float = 1.0 + + /// run this in order to create Lumina + public init() { + super.init(nibName: nil, bundle: nil) + let camera = LuminaCamera() + camera.delegate = self + self.camera = camera + } + + /// run this in order to create Lumina with a storyboard + public required init?(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + let camera = LuminaCamera() + camera.delegate = self + self.camera = camera + } + + /// override with caution + public override func didReceiveMemoryWarning() { + super.didReceiveMemoryWarning() + print("Camera framework is overloading on memory") + } + + /// override with caution + public override func viewWillAppear(_ animated: Bool) { + super.viewWillAppear(animated) + createUI() + self.camera?.updateVideo({ result in + self.handleCameraSetupResult(result) + }) + self.camera?.updateAudio({ result in + self.handleCameraSetupResult(result) + }) + } + + /// override with caution + public override func 
viewDidAppear(_ animated: Bool) { + super.viewDidAppear(animated) + feedbackGenerator.prepare() + } + + public override var shouldAutorotate: Bool { + guard let camera = self.camera else { + return true + } + return !camera.recordingVideo + } + + /// override with caution + public override func viewDidDisappear(_ animated: Bool) { + super.viewDidDisappear(animated) + self.camera?.stop() + } + + /// override with caution + public override func viewWillLayoutSubviews() { + super.viewWillLayoutSubviews() + if self.camera?.recordingVideo == true { + return + } + updateUI(orientation: UIApplication.shared.statusBarOrientation) + updateButtonFrames() + } + + /// override with caution + override public var prefersStatusBarHidden: Bool { + return true + } + + /// returns a string of the version of Lumina currently in use, follows semantic versioning. + open class func getVersion() -> String? { + let bundle = Bundle(for: LuminaViewController.self) + guard let infoDictionary = bundle.infoDictionary else { + return nil + } + guard let versionString = infoDictionary["CFBundleShortVersionString"] as?
String else { + return nil + } + return versionString + } +} diff --git a/Lumina/Lumina/Media.xcassets/Contents.json b/Lumina/Lumina/UI/Media.xcassets/Contents.json similarity index 100% rename from Lumina/Lumina/Media.xcassets/Contents.json rename to Lumina/Lumina/UI/Media.xcassets/Contents.json diff --git a/Lumina/Lumina/Media.xcassets/cameraFocus.imageset/Contents.json b/Lumina/Lumina/UI/Media.xcassets/cameraFocus.imageset/Contents.json similarity index 100% rename from Lumina/Lumina/Media.xcassets/cameraFocus.imageset/Contents.json rename to Lumina/Lumina/UI/Media.xcassets/cameraFocus.imageset/Contents.json diff --git a/Lumina/Lumina/Media.xcassets/cameraFocus.imageset/cameraFocus.png b/Lumina/Lumina/UI/Media.xcassets/cameraFocus.imageset/cameraFocus.png similarity index 100% rename from Lumina/Lumina/Media.xcassets/cameraFocus.imageset/cameraFocus.png rename to Lumina/Lumina/UI/Media.xcassets/cameraFocus.imageset/cameraFocus.png diff --git a/Lumina/Lumina/Media.xcassets/cameraSwitch.imageset/Contents.json b/Lumina/Lumina/UI/Media.xcassets/cameraSwitch.imageset/Contents.json similarity index 100% rename from Lumina/Lumina/Media.xcassets/cameraSwitch.imageset/Contents.json rename to Lumina/Lumina/UI/Media.xcassets/cameraSwitch.imageset/Contents.json diff --git a/Lumina/Lumina/Media.xcassets/cameraSwitch.imageset/cameraSwitch.png b/Lumina/Lumina/UI/Media.xcassets/cameraSwitch.imageset/cameraSwitch.png similarity index 100% rename from Lumina/Lumina/Media.xcassets/cameraSwitch.imageset/cameraSwitch.png rename to Lumina/Lumina/UI/Media.xcassets/cameraSwitch.imageset/cameraSwitch.png diff --git a/Lumina/Lumina/Media.xcassets/cameraTorch.imageset/Contents.json b/Lumina/Lumina/UI/Media.xcassets/cameraTorch.imageset/Contents.json similarity index 100% rename from Lumina/Lumina/Media.xcassets/cameraTorch.imageset/Contents.json rename to Lumina/Lumina/UI/Media.xcassets/cameraTorch.imageset/Contents.json diff --git 
a/Lumina/Lumina/Media.xcassets/cameraTorch.imageset/cameraTorch.png b/Lumina/Lumina/UI/Media.xcassets/cameraTorch.imageset/cameraTorch.png similarity index 100% rename from Lumina/Lumina/Media.xcassets/cameraTorch.imageset/cameraTorch.png rename to Lumina/Lumina/UI/Media.xcassets/cameraTorch.imageset/cameraTorch.png diff --git a/Lumina/Lumina/Info.plist b/Lumina/Lumina/Util/Info.plist similarity index 96% rename from Lumina/Lumina/Info.plist rename to Lumina/Lumina/Util/Info.plist index 4e99ba1..6f24fbf 100644 --- a/Lumina/Lumina/Info.plist +++ b/Lumina/Lumina/Util/Info.plist @@ -15,7 +15,7 @@ CFBundlePackageType FMWK CFBundleShortVersionString - 0.11.2 + 0.12.0 CFBundleVersion $(CURRENT_PROJECT_VERSION) NSPrincipalClass diff --git a/Lumina/Lumina/Lumina.h b/Lumina/Lumina/Util/Lumina.h similarity index 100% rename from Lumina/Lumina/Lumina.h rename to Lumina/Lumina/Util/Lumina.h diff --git a/LuminaSample/LuminaSample/Info.plist b/LuminaSample/LuminaSample/Info.plist index 626af8f..1de1348 100644 --- a/LuminaSample/LuminaSample/Info.plist +++ b/LuminaSample/LuminaSample/Info.plist @@ -15,7 +15,7 @@ CFBundlePackageType APPL CFBundleShortVersionString - 0.11.2 + 0.12.0 CFBundleVersion 1 LSRequiresIPhoneOS diff --git a/LuminaSample/LuminaSample/ViewController.swift b/LuminaSample/LuminaSample/ViewController.swift index 8e8811e..492803f 100644 --- a/LuminaSample/LuminaSample/ViewController.swift +++ b/LuminaSample/LuminaSample/ViewController.swift @@ -38,6 +38,8 @@ extension ViewController { //MARK: IBActions self.resolutionLabel.text = selectedResolution.rawValue if let version = LuminaViewController.getVersion() { self.title = "Lumina Sample v\(version)" + } else { + self.title = "Lumina Sample" } } @@ -147,16 +149,12 @@ extension ViewController: LuminaDelegate { func dismissed(controller: LuminaViewController) { controller.dismiss(animated: true, completion: nil) } - - } extension ViewController: ResolutionDelegate { func didSelect(resolution: CameraResolution, 
controller: ResolutionViewController) { selectedResolution = resolution - if let navigationController = self.navigationController { - navigationController.popToViewController(self, animated: true) - } + self.navigationController?.popToViewController(self, animated: true) } }