sync w/ upstream

This commit is contained in:
Peter Marathas 2018-11-23 13:45:25 -05:00
commit a672cd3da4
17 changed files with 682 additions and 349 deletions

View file

@ -1,5 +1,14 @@
# CHANGELOG
## Version 3.0.0
- Swift 4.2 support
## Version 2.6.0
- Fixed issue with crashing during video recording
- Using storyboard subviews should no longer require calling `bringSubviewToFront`
- Changed demo project to use autolayout
- Reverted Swift4 changes to new branch
## Version 2.4.0
- Added support for no-audio video recording
- Added `audioEnabled` property
@ -85,4 +94,4 @@
- Flash for photos only flashes when photo is taken.
- Flash for videos enables the torch when video begins recording and disables flash when video finishes
- Fixed issue where **SwiftyCamDidChangeZoomLevel(zoomLevel:)** would be called from pinch gestures from front camera
- Minor bug fixes and enhancements
- Minor bug fixes and enhancements

View file

@ -7,6 +7,8 @@
objects = {
/* Begin PBXBuildFile section */
056AAB091F97CB1700F6A978 /* Orientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 056AAB081F97CB1700F6A978 /* Orientation.swift */; };
056AAB0A1F97CB1E00F6A978 /* Orientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 056AAB081F97CB1700F6A978 /* Orientation.swift */; };
05D2A9B81E80BE9700B479E9 /* SwiftyCam-iOS.h in Headers */ = {isa = PBXBuildFile; fileRef = 05D2A9B61E80BE9700B479E9 /* SwiftyCam-iOS.h */; settings = {ATTRIBUTES = (Public, ); }; };
05D2A9BC1E80BE9D00B479E9 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1675A9891E00A74A00B80903 /* PreviewView.swift */; };
05D2A9BD1E80BE9D00B479E9 /* SwiftyCamButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1675A98A1E00A74A00B80903 /* SwiftyCamButton.swift */; };
@ -27,6 +29,7 @@
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
056AAB081F97CB1700F6A978 /* Orientation.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Orientation.swift; path = ../../Source/Orientation.swift; sourceTree = "<group>"; };
05D2A9B41E80BE9700B479E9 /* SwiftyCam.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = SwiftyCam.framework; sourceTree = BUILT_PRODUCTS_DIR; };
05D2A9B61E80BE9700B479E9 /* SwiftyCam-iOS.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "SwiftyCam-iOS.h"; sourceTree = "<group>"; };
05D2A9B71E80BE9700B479E9 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
@ -111,6 +114,7 @@
1675A9911E00A74F00B80903 /* Source */ = {
isa = PBXGroup;
children = (
056AAB081F97CB1700F6A978 /* Orientation.swift */,
1675A9891E00A74A00B80903 /* PreviewView.swift */,
1675A98A1E00A74A00B80903 /* SwiftyCamButton.swift */,
1675A98B1E00A74A00B80903 /* SwiftyCamViewController.swift */,
@ -175,7 +179,7 @@
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0810;
LastUpgradeCheck = 0820;
LastUpgradeCheck = 1010;
ORGANIZATIONNAME = Cappsule;
TargetAttributes = {
05D2A9B31E80BE9700B479E9 = {
@ -184,7 +188,7 @@
};
1675A9711E00A68300B80903 = {
CreatedOnToolsVersion = 8.1;
DevelopmentTeam = DGV5BLXSF9;
LastSwiftMigration = 1010;
ProvisioningStyle = Automatic;
};
};
@ -234,6 +238,7 @@
buildActionMask = 2147483647;
files = (
05D2A9BC1E80BE9D00B479E9 /* PreviewView.swift in Sources */,
056AAB0A1F97CB1E00F6A978 /* Orientation.swift in Sources */,
05D2A9BF1E80BE9D00B479E9 /* SwiftyCamViewControllerDelegate.swift in Sources */,
05D2A9BE1E80BE9D00B479E9 /* SwiftyCamViewController.swift in Sources */,
05D2A9BD1E80BE9D00B479E9 /* SwiftyCamButton.swift in Sources */,
@ -250,6 +255,7 @@
1675A9761E00A68300B80903 /* AppDelegate.swift in Sources */,
1675A98F1E00A74A00B80903 /* SwiftyCamViewController.swift in Sources */,
168505EA1E288D80005B4537 /* PhotoViewController.swift in Sources */,
056AAB091F97CB1700F6A978 /* Orientation.swift in Sources */,
16298B561E2703DC0056D413 /* SwiftyRecordButton.swift in Sources */,
1675A98D1E00A74A00B80903 /* PreviewView.swift in Sources */,
1675A98E1E00A74A00B80903 /* SwiftyCamButton.swift in Sources */,
@ -294,7 +300,7 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.Cappsule.SwiftyCam-iOS";
PRODUCT_NAME = SwiftyCam;
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
@ -317,7 +323,7 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.Cappsule.SwiftyCam-iOS";
PRODUCT_NAME = SwiftyCam;
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
@ -333,15 +339,23 @@
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_SUSPICIOUS_MOVES = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
@ -371,6 +385,8 @@
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_SWIFT3_OBJC_INFERENCE = Default;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
@ -383,15 +399,23 @@
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_SUSPICIOUS_MOVES = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
@ -413,6 +437,8 @@
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
SWIFT_SWIFT3_OBJC_INFERENCE = Default;
SWIFT_VERSION = 4.0;
VALIDATE_PRODUCT = YES;
};
name = Release;
@ -421,13 +447,13 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = DGV5BLXSF9;
DEVELOPMENT_TEAM = "";
INFOPLIST_FILE = DemoSwiftyCam/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = com.Cappsule.DemoSwiftyCam;
PRODUCT_BUNDLE_IDENTIFIER = com.Walzy.DemoSwiftyCam1;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_VERSION = 4.2;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
@ -436,13 +462,13 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = DGV5BLXSF9;
DEVELOPMENT_TEAM = "";
INFOPLIST_FILE = DemoSwiftyCam/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = com.Cappsule.DemoSwiftyCam;
PRODUCT_BUNDLE_IDENTIFIER = com.Walzy.DemoSwiftyCam1;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_VERSION = 4.2;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;

View file

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0820"
LastUpgradeVersion = "1010"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"

View file

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0820"
LastUpgradeVersion = "0900"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
@ -26,6 +26,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
@ -45,6 +46,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"

View file

@ -22,7 +22,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
// Override point for customization after application launch.
return true
}

View file

@ -1,10 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11762" systemVersion="16D17a" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11757"/>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
@ -19,11 +20,56 @@
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="hke-fJ-4fX" customClass="SwiftyRecordButton" customModule="DemoSwiftyCam" customModuleProvider="target">
<rect key="frame" x="150" y="572" width="75" height="75"/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
<constraints>
<constraint firstAttribute="height" constant="75" id="8Mb-dy-Ned"/>
<constraint firstAttribute="width" constant="75" id="YVv-UJ-n1R"/>
</constraints>
</view>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="hS2-sy-kuv">
<rect key="frame" x="70" y="598" width="30" height="23"/>
<state key="normal" image="flipCamera"/>
<connections>
<action selector="cameraSwitchTapped:" destination="BYZ-38-t0r" eventType="touchUpInside" id="Du6-FK-hjA"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="stR-Rn-UdV">
<rect key="frame" x="275" y="594" width="18" height="30"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="8dD-oU-QV8"/>
<constraint firstAttribute="width" constant="18" id="rLN-FU-RYb"/>
</constraints>
<state key="normal" image="flashOutline"/>
<connections>
<action selector="toggleFlashTapped:" destination="BYZ-38-t0r" eventType="touchUpInside" id="C5u-0g-Kim"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="bottom" secondItem="hke-fJ-4fX" secondAttribute="bottom" constant="20" symbolic="YES" id="0OM-Pc-Ze1"/>
<constraint firstItem="hke-fJ-4fX" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="22M-gB-uKl"/>
<constraint firstItem="stR-Rn-UdV" firstAttribute="leading" secondItem="hke-fJ-4fX" secondAttribute="trailing" constant="50" id="4S2-fF-Ta9"/>
<constraint firstItem="hS2-sy-kuv" firstAttribute="centerY" secondItem="hke-fJ-4fX" secondAttribute="centerY" id="AuI-5x-aKM"/>
<constraint firstItem="hke-fJ-4fX" firstAttribute="leading" secondItem="hS2-sy-kuv" secondAttribute="trailing" constant="50" id="fY6-aF-vVL"/>
<constraint firstItem="stR-Rn-UdV" firstAttribute="centerY" secondItem="hke-fJ-4fX" secondAttribute="centerY" id="sJP-2Z-GAn"/>
</constraints>
</view>
<connections>
<outlet property="captureButton" destination="hke-fJ-4fX" id="nng-T2-6kL"/>
<outlet property="flashButton" destination="stR-Rn-UdV" id="WAJ-Ik-VF7"/>
<outlet property="flipCameraButton" destination="hS2-sy-kuv" id="lWF-Wl-Vg1"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
<resources>
<image name="flashOutline" width="18" height="30"/>
<image name="flipCamera" width="30" height="23"/>
</resources>
</document>

View file

@ -2,6 +2,8 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSPhotoLibraryUsageDescription</key>
<string>To save videos</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>

View file

@ -36,16 +36,16 @@ class PhotoViewController: UIViewController {
super.viewDidLoad()
self.view.backgroundColor = UIColor.gray
let backgroundImageView = UIImageView(frame: view.frame)
backgroundImageView.contentMode = UIViewContentMode.scaleAspectFit
backgroundImageView.contentMode = UIView.ContentMode.scaleAspectFit
backgroundImageView.image = backgroundImage
view.addSubview(backgroundImageView)
let cancelButton = UIButton(frame: CGRect(x: 10.0, y: 10.0, width: 30.0, height: 30.0))
cancelButton.setImage(#imageLiteral(resourceName: "cancel"), for: UIControlState())
cancelButton.setImage(#imageLiteral(resourceName: "cancel"), for: UIControl.State())
cancelButton.addTarget(self, action: #selector(cancel), for: .touchUpInside)
view.addSubview(cancelButton)
}
func cancel() {
@objc func cancel() {
dismiss(animated: true, completion: nil)
}
}

View file

@ -49,15 +49,32 @@ class VideoViewController: UIViewController {
playerController!.showsPlaybackControls = false
playerController!.player = player!
self.addChildViewController(playerController!)
self.addChild(playerController!)
self.view.addSubview(playerController!.view)
playerController!.view.frame = view.frame
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.player!.currentItem)
let cancelButton = UIButton(frame: CGRect(x: 10.0, y: 10.0, width: 30.0, height: 30.0))
cancelButton.setImage(#imageLiteral(resourceName: "cancel"), for: UIControlState())
cancelButton.setImage(#imageLiteral(resourceName: "cancel"), for: UIControl.State())
cancelButton.addTarget(self, action: #selector(cancel), for: .touchUpInside)
view.addSubview(cancelButton)
// Allow background audio to continue to play
do {
if #available(iOS 10.0, *) {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.ambient, mode: .default, options: [])
} else {
}
} catch let error as NSError {
print(error)
}
do {
try AVAudioSession.sharedInstance().setActive(true)
} catch let error as NSError {
print(error)
}
}
override func viewDidAppear(_ animated: Bool) {
@ -65,14 +82,19 @@ class VideoViewController: UIViewController {
player?.play()
}
func cancel() {
@objc func cancel() {
dismiss(animated: true, completion: nil)
}
@objc fileprivate func playerItemDidReachEnd(_ notification: Notification) {
if self.player != nil {
self.player!.seek(to: kCMTimeZero)
self.player!.seek(to: CMTime.zero)
self.player!.play()
}
}
}
// Helper function inserted by the Xcode Swift 4.2 migrator when
// `AVAudioSession.Category` became a dedicated type instead of a raw String.
// NOTE(review): appears unused within this file — confirm there are no
// remaining callers before deleting this migrator artifact.
fileprivate func convertFromAVAudioSessionCategory(_ input: AVAudioSession.Category) -> String {
return input.rawValue
}

View file

@ -15,21 +15,25 @@ LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
import UIKit
import AVFoundation
class ViewController: SwiftyCamViewController, SwiftyCamViewControllerDelegate {
var flipCameraButton: UIButton!
var flashButton: UIButton!
var captureButton: SwiftyRecordButton!
@IBOutlet weak var captureButton : SwiftyRecordButton!
@IBOutlet weak var flipCameraButton : UIButton!
@IBOutlet weak var flashButton : UIButton!
override func viewDidLoad() {
super.viewDidLoad()
shouldPrompToAppSettings = true
cameraDelegate = self
maximumVideoDuration = 10.0
shouldUseDeviceOrientation = true
allowAutoRotate = true
audioEnabled = true
addButtons()
// disable capture button until session starts
captureButton.buttonEnabled = false
}
override var prefersStatusBarHidden: Bool {
@ -38,7 +42,19 @@ class ViewController: SwiftyCamViewController, SwiftyCamViewControllerDelegate {
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
captureButton.delegate = self
}
func swiftyCamSessionDidStartRunning(_ swiftyCam: SwiftyCamViewController) {
print("Session did start running")
captureButton.buttonEnabled = true
}
func swiftyCamSessionDidStopRunning(_ swiftyCam: SwiftyCamViewController) {
print("Session did stop running")
captureButton.buttonEnabled = false
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didTake photo: UIImage) {
let newVC = PhotoViewController(image: photo)
@ -48,19 +64,13 @@ class ViewController: SwiftyCamViewController, SwiftyCamViewControllerDelegate {
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didBeginRecordingVideo camera: SwiftyCamViewController.CameraSelection) {
print("Did Begin Recording")
captureButton.growButton()
UIView.animate(withDuration: 0.25, animations: {
self.flashButton.alpha = 0.0
self.flipCameraButton.alpha = 0.0
})
hideButtons()
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishRecordingVideo camera: SwiftyCamViewController.CameraSelection) {
print("Did finish Recording")
captureButton.shrinkButton()
UIView.animate(withDuration: 0.25, animations: {
self.flashButton.alpha = 1.0
self.flipCameraButton.alpha = 1.0
})
showButtons()
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishProcessVideoAt url: URL) {
@ -69,62 +79,84 @@ class ViewController: SwiftyCamViewController, SwiftyCamViewControllerDelegate {
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFocusAtPoint point: CGPoint) {
let focusView = UIImageView(image: #imageLiteral(resourceName: "focus"))
focusView.center = point
focusView.alpha = 0.0
view.addSubview(focusView)
UIView.animate(withDuration: 0.25, delay: 0.0, options: .curveEaseInOut, animations: {
focusView.alpha = 1.0
focusView.transform = CGAffineTransform(scaleX: 1.25, y: 1.25)
}, completion: { (success) in
UIView.animate(withDuration: 0.15, delay: 0.5, options: .curveEaseInOut, animations: {
focusView.alpha = 0.0
focusView.transform = CGAffineTransform(translationX: 0.6, y: 0.6)
}, completion: { (success) in
focusView.removeFromSuperview()
})
})
print("Did focus at point: \(point)")
focusAnimationAt(point)
}
func swiftyCamDidFailToConfigure(_ swiftyCam: SwiftyCamViewController) {
let message = NSLocalizedString("Unable to capture media", comment: "Alert message when something goes wrong during capture session configuration")
let alertController = UIAlertController(title: "AVCam", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil))
present(alertController, animated: true, completion: nil)
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didChangeZoomLevel zoom: CGFloat) {
print("Zoom level did change. Level: \(zoom)")
print(zoom)
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didSwitchCameras camera: SwiftyCamViewController.CameraSelection) {
print("Camera did change to \(camera.rawValue)")
print(camera)
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFailToRecordVideo error: Error) {
print(error)
}
@objc private func cameraSwitchAction(_ sender: Any) {
switchCamera()
}
@objc private func toggleFlashAction(_ sender: Any) {
flashEnabled = !flashEnabled
if flashEnabled == true {
flashButton.setImage(#imageLiteral(resourceName: "flash"), for: UIControlState())
} else {
flashButton.setImage(#imageLiteral(resourceName: "flashOutline"), for: UIControlState())
}
}
private func addButtons() {
captureButton = SwiftyRecordButton(frame: CGRect(x: view.frame.midX - 37.5, y: view.frame.height - 100.0, width: 75.0, height: 75.0))
self.view.addSubview(captureButton)
captureButton.delegate = self
flipCameraButton = UIButton(frame: CGRect(x: (((view.frame.width / 2 - 37.5) / 2) - 15.0), y: view.frame.height - 74.0, width: 30.0, height: 23.0))
flipCameraButton.setImage(#imageLiteral(resourceName: "flipCamera"), for: UIControlState())
flipCameraButton.addTarget(self, action: #selector(cameraSwitchAction(_:)), for: .touchUpInside)
self.view.addSubview(flipCameraButton)
let test = CGFloat((view.frame.width - (view.frame.width / 2 + 37.5)) + ((view.frame.width / 2) - 37.5) - 9.0)
flashButton = UIButton(frame: CGRect(x: test, y: view.frame.height - 77.5, width: 18.0, height: 30.0))
flashButton.setImage(#imageLiteral(resourceName: "flashOutline"), for: UIControlState())
flashButton.addTarget(self, action: #selector(toggleFlashAction(_:)), for: .touchUpInside)
self.view.addSubview(flashButton)
}
@IBAction func cameraSwitchTapped(_ sender: Any) {
switchCamera()
}
@IBAction func toggleFlashTapped(_ sender: Any) {
flashEnabled = !flashEnabled
toggleFlashAnimation()
}
}
// MARK: - UI Animations
// Small presentation helpers split out of the delegate callbacks above so the
// SwiftyCamViewControllerDelegate methods stay focused on camera events.
extension ViewController {
/// Fades the flash and flip-camera buttons out (called when video recording begins).
fileprivate func hideButtons() {
UIView.animate(withDuration: 0.25) {
self.flashButton.alpha = 0.0
self.flipCameraButton.alpha = 0.0
}
}
/// Fades the flash and flip-camera buttons back in (called when recording ends).
fileprivate func showButtons() {
UIView.animate(withDuration: 0.25) {
self.flashButton.alpha = 1.0
self.flipCameraButton.alpha = 1.0
}
}
/// Shows a brief "focus reticle" animation at `point`: the focus image pops in
/// (scaled up 1.25x), then fades out after a short delay and removes itself.
/// `point` is in the preview layer's coordinate space, as delivered by
/// swiftyCam(_:didFocusAtPoint:).
fileprivate func focusAnimationAt(_ point: CGPoint) {
let focusView = UIImageView(image: #imageLiteral(resourceName: "focus"))
focusView.center = point
focusView.alpha = 0.0
view.addSubview(focusView)
UIView.animate(withDuration: 0.25, delay: 0.0, options: .curveEaseInOut, animations: {
focusView.alpha = 1.0
focusView.transform = CGAffineTransform(scaleX: 1.25, y: 1.25)
}) { (success) in
UIView.animate(withDuration: 0.15, delay: 0.5, options: .curveEaseInOut, animations: {
focusView.alpha = 0.0
focusView.transform = CGAffineTransform(translationX: 0.6, y: 0.6)
}) { (success) in
focusView.removeFromSuperview()
}
}
}
/// Syncs the flash button image with the current `flashEnabled` state
/// (filled icon when enabled, outline when disabled).
fileprivate func toggleFlashAnimation() {
if flashEnabled == true {
flashButton.setImage(#imageLiteral(resourceName: "flash"), for: UIControl.State())
} else {
flashButton.setImage(#imageLiteral(resourceName: "flashOutline"), for: UIControl.State())
}
}
}

View file

@ -1,8 +1,8 @@
<img src="SwiftyCamBanner.jpg" align="center">
<img src="SwiftyCamBanner.jpg" align="center">
<p align="center">
<img src="https://img.shields.io/badge/platform-iOS%208%2B-blue.svg?style=flat" alt="Platform: iOS 8+"/>
<a href="https://developer.apple.com/swift"><img src="https://img.shields.io/badge/language-swift%203-4BC51D.svg?style=flat" alt="Language: Swift 3" /></a>
<a href="https://developer.apple.com/swift"><img src="https://img.shields.io/badge/language-swift%204.2-4BC51D.svg?style=flat" alt="Language: Swift 4.2" /></a>
<a href="https://cocoapods.org/pods/SwiftyCam"><img src="https://img.shields.io/cocoapods/v/SwiftyCam.svg?style=flat" alt="CocoaPods compatible" /></a>
<img src="http://img.shields.io/badge/license-BSD-lightgrey.svg?style=flat" alt="License: BSD" /> <br><br>
</p>
@ -13,10 +13,12 @@ SwiftyCam is a a simple, Snapchat-style iOS Camera framework for easy photo and
Configuring a Camera View Controller in AVFoundation can be tedious and time consuming. SwiftyCam is a drop in View Controller which gives complete control of the AVSession.
### For Swift 4 support, see Swift4 branch
## Features
| | SwiftyCam
| ------------------------------------- | ---------------------
| ------------------------------------- | ---------------------
| :sunglasses: | Snapchat-style media capture
| :+1: | Support iOS8+
| :camera: | Image capture
@ -29,13 +31,13 @@ Configuring a Camera View Controller in AVFoundation can be tedious and time con
| :lock: | Supports manual focus
| :last_quarter_moon_with_face: | Low light setting
| :speaker: | Background audio support
## Requirements
* iOS 8.0+
* Swift 3.0+
* Swift 4.2+
## License
@ -127,13 +129,13 @@ Capturing Video is just as easy. To begin recording video, call the `startVideoR
```swift
startVideoRecording()
```
```
To end the capture of a video, call the `stopVideoRecording` function:
```swift
stopVideoRecording()
```
```
### Delegate
@ -142,7 +144,7 @@ In order to acquire the photos and videos taken by either the SwiftyCamButton or
```swift
class MyCameraViewController : SwiftyCamViewController, SwiftyCamViewControllerDelegate {
override func viewDidLoad() {
super.viewDidLoad()
cameraDelegate = self
@ -158,34 +160,34 @@ func swiftyCam(_ swiftyCam: SwiftyCamViewController, didTake photo: UIImage) {
// Called when takePhoto() is called or if a SwiftyCamButton initiates a tap gesture
// Returns a UIImage captured from the current session
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didBeginRecordingVideo camera: SwiftyCamViewController.CameraSelection) {
// Called when startVideoRecording() is called
// Called when startVideoRecording() is called
// Called if a SwiftyCamButton begins a long press gesture
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishRecordingVideo camera: SwiftyCamViewController.CameraSelection) {
// Called when stopVideoRecording() is called
// Called when stopVideoRecording() is called
// Called if a SwiftyCamButton ends a long press gesture
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishProcessVideoAt url: URL) {
// Called when stopVideoRecording() is called and the video is finished processing
// Returns a URL in the temporary directory where video is stored
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFocusAtPoint point: CGPoint) {
// Called when a user initiates a tap gesture on the preview layer
// Will only be called if tapToFocus = true
// Returns a CGPoint of the tap location on the preview layer
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didChangeZoomLevel zoom: CGFloat) {
// Called when a user initiates a pinch gesture on the preview layer
// Will only be called if pinchToZoom = true
// Returns a CGFloat of the current zoom level
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didSwitchCameras camera: SwiftyCamViewController.CameraSelection) {
// Called when user switches between cameras
// Returns current camera selection
@ -199,8 +201,8 @@ The flash(torch) can be enabled by changing the `flashEnabled` property:
```swift
flashEnabled = true
```
Flash is now supported for front and rear facing cameras.
Flash is now supported for front and rear facing cameras.
### Rear Camera
@ -247,7 +249,7 @@ SwiftyCam has several options for configurating the functionality of the capture
Video quality can be set by the **videoQuality** property of SwiftyCamViewController. The choices available correspond to the matching **AVCaptureSessionPreset**:
| VideoQuality | AVCaptureSessionPreset
| ------------------------------------- | ---------------------
| ------------------------------------- | ---------------------
| `.high` | **AVCapturePresetHigh**
| `.medium` | **AVCapturePresetMedium**
| `.low` | **AVCapturePresetLow**
@ -258,7 +260,7 @@ Video quality can be set by the **videoQuality** property of SwiftyCamViewContro
| `.resolution3840x2160` | **AVCaptureSessionPreset3840x2160**
| `.iframe960x540` | **AVCaptureSessionPresetiFrame960x540**
| `.iframe1280x720` | **AVCaptureSessionPresetiFrame1280x720**
The default value is **.high**. For use with the front-facing camera, **.high** will always be used.
@ -292,7 +294,7 @@ By default, ```swipeToZoom``` is enabled. The default gestures zoom in the captu
```swift
swipeToZoomInverted = true
```
```
You can also restrict the amount that the rear facing camera can zoom. To do this, use the `maxZoomScale` property:
@ -339,7 +341,7 @@ lowLightBoost = false
## Privacy
When a user first launches SwiftyCamViewController, they will be prompted for permission for access to the cameras and microphones. By default, if a user declines access to the hardware, SwiftyCam will provide a prompt to the App privacy settings inside the iOS settings application.
When a user first launches SwiftyCamViewController, they will be prompted for permission for access to the cameras and microphones. By default, if a user declines access to the hardware, SwiftyCam will provide a prompt to the App privacy settings inside the iOS settings application.
## Miscellaneous

110
Source/Orientation.swift Normal file
View file

@ -0,0 +1,110 @@
/*Copyright (c) 2016, Andrew Walz.
Redistribution and use in source and binary forms, with or without modification,are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
import Foundation
import AVFoundation
import UIKit
import CoreMotion
/// Tracks the physical device orientation via the accelerometer (which keeps
/// working even when the UI orientation is locked) and translates it into the
/// orientation values needed for captured photos, video connections, and the
/// preview layer.
class Orientation {

    /// When `false` (the default), the portrait fallbacks are always returned
    /// and accelerometer data is ignored.
    var shouldUseDeviceOrientation: Bool = false

    /// Last orientation derived from accelerometer samples; `nil` once
    /// `stop()` has been called or before `start()`.
    fileprivate var deviceOrientation : UIDeviceOrientation?

    fileprivate let coreMotionManager = CMMotionManager()

    init() {
        // 10 Hz sampling is ample for orientation detection and keeps the
        // battery/CPU cost of CoreMotion updates low.
        coreMotionManager.accelerometerUpdateInterval = 0.1
    }

    deinit {
        // Bug fix: accelerometer updates were never stopped when the owner was
        // deallocated mid-tracking, leaving CoreMotion delivering updates (and
        // draining battery) with no consumer.
        coreMotionManager.stopAccelerometerUpdates()
    }

    /// Seeds the cached orientation from UIKit and begins accelerometer-based
    /// tracking on the main queue.
    func start() {
        self.deviceOrientation = UIDevice.current.orientation
        coreMotionManager.startAccelerometerUpdates(to: .main) { [weak self] (data, error) in
            guard let data = data else {
                return
            }
            self?.handleAccelerometerUpdate(data: data)
        }
    }

    /// Stops accelerometer tracking and clears the cached orientation.
    func stop() {
        self.coreMotionManager.stopAccelerometerUpdates()
        self.deviceOrientation = nil
    }

    /// Returns the `UIImage.Orientation` a still photo from `forCamera` should
    /// carry so it displays upright. Front-camera values are mirrored.
    /// Falls back to the portrait defaults (.right / .leftMirrored) when
    /// tracking is disabled or not running.
    func getImageOrientation(forCamera: SwiftyCamViewController.CameraSelection) -> UIImage.Orientation {
        guard shouldUseDeviceOrientation, let deviceOrientation = self.deviceOrientation else { return forCamera == .rear ? .right : .leftMirrored }
        switch deviceOrientation {
        case .landscapeLeft:
            return forCamera == .rear ? .up : .downMirrored
        case .landscapeRight:
            return forCamera == .rear ? .down : .upMirrored
        case .portraitUpsideDown:
            return forCamera == .rear ? .left : .rightMirrored
        default:
            // .portrait, .faceUp, .faceDown, .unknown all use the portrait mapping.
            return forCamera == .rear ? .right : .leftMirrored
        }
    }

    /// Orientation for the on-screen preview layer. This intentionally follows
    /// the interface (status bar) orientation, not the accelerometer, because
    /// the preview must match the current layout.
    func getPreviewLayerOrientation() -> AVCaptureVideoOrientation {
        // Depends on layout orientation, not device orientation
        switch UIApplication.shared.statusBarOrientation {
        case .portrait, .unknown:
            return AVCaptureVideoOrientation.portrait
        case .landscapeLeft:
            return AVCaptureVideoOrientation.landscapeLeft
        case .landscapeRight:
            return AVCaptureVideoOrientation.landscapeRight
        case .portraitUpsideDown:
            return AVCaptureVideoOrientation.portraitUpsideDown
        }
    }

    /// Orientation to apply to the video capture connection, or `nil` when
    /// device-orientation tracking is disabled/stopped (caller keeps its
    /// current setting). Note the deliberate left/right swap: UIDeviceOrientation
    /// and AVCaptureVideoOrientation define landscape in opposite directions.
    func getVideoOrientation() -> AVCaptureVideoOrientation? {
        guard shouldUseDeviceOrientation, let deviceOrientation = self.deviceOrientation else { return nil }
        switch deviceOrientation {
        case .landscapeLeft:
            return .landscapeRight
        case .landscapeRight:
            return .landscapeLeft
        case .portraitUpsideDown:
            return .portraitUpsideDown
        default:
            return .portrait
        }
    }

    /// Classifies an accelerometer sample into one of the four planar
    /// orientations: whichever axis dominates picks landscape vs portrait,
    /// and the sign of that axis picks the direction.
    private func handleAccelerometerUpdate(data: CMAccelerometerData){
        if(abs(data.acceleration.y) < abs(data.acceleration.x)){
            if(data.acceleration.x > 0){
                deviceOrientation = UIDeviceOrientation.landscapeRight
            } else {
                deviceOrientation = UIDeviceOrientation.landscapeLeft
            }
        } else{
            if(data.acceleration.y > 0){
                deviceOrientation = UIDeviceOrientation.portraitUpsideDown
            } else {
                deviceOrientation = UIDeviceOrientation.portrait
            }
        }
    }
}

View file

@ -55,11 +55,11 @@ class PreviewView: UIView {
let previewlayer = layer as! AVCaptureVideoPreviewLayer
switch gravity {
case .resize:
previewlayer.videoGravity = AVLayerVideoGravityResize
previewlayer.videoGravity = AVLayerVideoGravity.resize
case .resizeAspect:
previewlayer.videoGravity = AVLayerVideoGravityResizeAspect
previewlayer.videoGravity = AVLayerVideoGravity.resizeAspect
case .resizeAspectFill:
previewlayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewlayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
}
return previewlayer
}

View file

@ -54,6 +54,10 @@ open class SwiftyCamButton: UIButton {
public weak var delegate: SwiftyCamButtonDelegate?
// Sets whether button is enabled
public var buttonEnabled = true
/// Maximum duration variable
fileprivate var timer : Timer?
@ -76,12 +80,19 @@ open class SwiftyCamButton: UIButton {
/// UITapGestureRecognizer Function
@objc fileprivate func Tap() {
guard buttonEnabled == true else {
return
}
delegate?.buttonWasTapped()
}
/// UILongPressGestureRecognizer Function
@objc fileprivate func LongPress(_ sender:UILongPressGestureRecognizer!) {
guard buttonEnabled == true else {
return
}
switch sender.state {
case .began:
delegate?.buttonDidBeginLongPress()

View file

@ -27,13 +27,13 @@ open class SwiftyCamViewController: UIViewController {
/// Enumeration for Camera Selection
public enum CameraSelection {
public enum CameraSelection: String {
/// Camera on the back of the device
case rear
case rear = "rear"
/// Camera on the front of the device
case front
case front = "front"
}
/// Enumeration for video quality of the capture session. Corresponds to a AVCaptureSessionPreset
@ -112,7 +112,7 @@ open class SwiftyCamViewController: UIViewController {
/// Sets the maximum zoom scale allowed during zoom gestures
public var maxZoomScale = CGFloat.greatestFiniteMagnitude
/// Sets whether Tap to Focus and Tap to Adjust Exposure is enabled for the capture session
public var tapToFocus = true
@ -130,13 +130,13 @@ open class SwiftyCamViewController: UIViewController {
/// Sets whether a double tap to switch cameras is supported
public var doubleTapCameraSwitch = true
/// Sets whether swipe vertically to zoom is supported
public var swipeToZoom = true
/// Sets whether swipe vertically gestures should be inverted
public var swipeToZoomInverted = false
/// Set default launch camera
@ -145,22 +145,30 @@ open class SwiftyCamViewController: UIViewController {
/// Sets whether the taken photo or video should be oriented according to the device orientation
public var shouldUseDeviceOrientation = false
public var shouldUseDeviceOrientation = false {
didSet {
orientation.shouldUseDeviceOrientation = shouldUseDeviceOrientation
}
}
/// Sets whether or not View Controller supports auto rotation
public var allowAutoRotate = false
/// Specifies the [videoGravity](https://developer.apple.com/reference/avfoundation/avcapturevideopreviewlayer/1386708-videogravity) for the preview layer.
public var videoGravity : SwiftyCamVideoGravity = .resizeAspect
/// Sets whether or not video recordings will record audio
/// Setting to true will prompt user for access to microphone on View Controller launch.
public var audioEnabled = true
/// Sets whether or not app should display prompt to app settings if audio/video permission is denied
/// If set to false, delegate function will be called to handle exception
public var shouldPrompToAppSettings = true
/// Public access to Pinch Gesture
fileprivate(set) public var pinchGesture : UIPinchGestureRecognizer!
/// Public access to Pan Gesture
fileprivate(set) public var panGesture : UIPanGestureRecognizer!
@ -234,14 +242,18 @@ open class SwiftyCamViewController: UIViewController {
/// UIView for front facing flash
fileprivate var flashView : UIView?
/// Pan Translation
fileprivate var previousPanTranslation : CGFloat = 0.0
/// Last changed orientation
fileprivate var deviceOrientation : UIDeviceOrientation?
fileprivate var orientation : Orientation = Orientation()
/// Boolean to store when View Controller is notified session is running
fileprivate var sessionRunning = false
/// Disable view autorotation for forced portrait recording
@ -255,18 +267,20 @@ open class SwiftyCamViewController: UIViewController {
override open func viewDidLoad() {
super.viewDidLoad()
view = PreviewView(frame: view.frame, videoGravity: videoGravity)
previewLayer = view as! PreviewView!
previewLayer = PreviewView(frame: view.frame, videoGravity: videoGravity)
previewLayer.center = view.center
view.addSubview(previewLayer)
view.sendSubviewToBack(previewLayer)
// Add Gesture Recognizers
addGestureRecognizers()
previewLayer.session = session
// Test authorization status for Camera and Micophone
switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo){
switch AVCaptureDevice.authorizationStatus(for: AVMediaType.video) {
case .authorized:
// already authorized
@ -275,7 +289,7 @@ open class SwiftyCamViewController: UIViewController {
// not yet determined
sessionQueue.suspend()
AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [unowned self] granted in
AVCaptureDevice.requestAccess(for: AVMediaType.video, completionHandler: { [unowned self] granted in
if !granted {
self.setupResult = .notAuthorized
}
@ -292,67 +306,74 @@ open class SwiftyCamViewController: UIViewController {
}
// MARK: ViewDidLayoutSubviews
/// ViewDidLayoutSubviews() Implementation
/// ViewDidLayoutSubviews() Implementation
private func updatePreviewLayer(layer: AVCaptureConnection, orientation: AVCaptureVideoOrientation) {
layer.videoOrientation = orientation
previewLayer.frame = self.view.bounds
}
override open func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
if let connection = self.previewLayer?.videoPreviewLayer.connection {
let currentDevice: UIDevice = UIDevice.current
let orientation: UIDeviceOrientation = currentDevice.orientation
let previewLayerConnection : AVCaptureConnection = connection
if previewLayerConnection.isVideoOrientationSupported {
switch (orientation) {
case .portrait: updatePreviewLayer(layer: previewLayerConnection, orientation: .portrait)
break
case .landscapeRight: updatePreviewLayer(layer: previewLayerConnection, orientation: .landscapeLeft)
break
case .landscapeLeft: updatePreviewLayer(layer: previewLayerConnection, orientation: .landscapeRight)
break
case .portraitUpsideDown: updatePreviewLayer(layer: previewLayerConnection, orientation: .portraitUpsideDown)
break
default: updatePreviewLayer(layer: previewLayerConnection, orientation: .portrait)
break
}
}
}
}
// MARK: ViewWillAppear
/// ViewWillAppear(_ animated:) Implementation
open override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
NotificationCenter.default.addObserver(self, selector: #selector(captureSessionDidStartRunning), name: .AVCaptureSessionDidStartRunning, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(captureSessionDidStopRunning), name: .AVCaptureSessionDidStopRunning, object: nil)
}
// MARK: ViewDidAppear
/// ViewDidAppear(_ animated:) Implementation
override open func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
// Subscribe to device rotation notifications
if shouldUseDeviceOrientation {
subscribeToDeviceOrientationChangeNotifications()
orientation.start()
}
// Set background audio preference
@ -365,23 +386,23 @@ open class SwiftyCamViewController: UIViewController {
// Begin Session
self.session.startRunning()
self.isSessionRunning = self.session.isRunning
// Preview layer video orientation can be set only after the connection is created
DispatchQueue.main.async {
self.previewLayer.videoPreviewLayer.connection?.videoOrientation = self.getPreviewLayerOrientation()
self.previewLayer.videoPreviewLayer.connection?.videoOrientation = self.orientation.getPreviewLayerOrientation()
}
case .notAuthorized:
// Prompt to App Settings
self.promptToAppSettings()
if self.shouldPrompToAppSettings == true {
self.promptToAppSettings()
} else {
self.cameraDelegate?.swiftyCamNotAuthorized(self)
}
case .configurationFailed:
// Unknown Error
DispatchQueue.main.async(execute: { [unowned self] in
let message = NSLocalizedString("Unable to capture media", comment: "Alert message when something goes wrong during capture session configuration")
let alertController = UIAlertController(title: "AVCam", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil))
self.present(alertController, animated: true, completion: nil)
})
DispatchQueue.main.async {
self.cameraDelegate?.swiftyCamDidFailToConfigure(self)
}
}
}
}
@ -394,6 +415,9 @@ open class SwiftyCamViewController: UIViewController {
override open func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated)
NotificationCenter.default.removeObserver(self)
sessionRunning = false
// If session is running, stop the session
if self.isSessionRunning == true {
self.session.stopRunning()
@ -405,7 +429,7 @@ open class SwiftyCamViewController: UIViewController {
// Unsubscribe from device rotation notifications
if shouldUseDeviceOrientation {
unsubscribeFromDeviceOrientationChangeNotifications()
orientation.stop()
}
}
@ -425,6 +449,7 @@ open class SwiftyCamViewController: UIViewController {
return
}
if device.hasFlash == true && flashEnabled == true /* TODO: Add Support for Retina Flash and add front flash */ {
changeFlashSettings(device: device, mode: .on)
capturePhotoAsyncronously(completionHandler: { (_) in })
@ -464,6 +489,11 @@ open class SwiftyCamViewController: UIViewController {
*/
public func startVideoRecording() {
guard sessionRunning == true else {
print("[SwiftyCam]: Cannot start video recording. Capture session is not running")
return
}
guard let movieFileOutput = self.movieFileOutput else {
return
}
@ -479,6 +509,9 @@ open class SwiftyCamViewController: UIViewController {
previewLayer.addSubview(flashView!)
}
//Must be fetched before on main thread
let previewOrientation = previewLayer.videoPreviewLayer.connection!.videoOrientation
sessionQueue.async { [unowned self] in
if !movieFileOutput.isRecording {
if UIDevice.current.isMultitaskingSupported {
@ -486,7 +519,7 @@ open class SwiftyCamViewController: UIViewController {
}
// Update the orientation on the movie file output video connection before starting recording.
let movieFileOutputConnection = self.movieFileOutput?.connection(withMediaType: AVMediaTypeVideo)
let movieFileOutputConnection = self.movieFileOutput?.connection(with: AVMediaType.video)
//flip video output if front facing camera is selected
@ -494,12 +527,12 @@ open class SwiftyCamViewController: UIViewController {
movieFileOutputConnection?.isVideoMirrored = true
}
movieFileOutputConnection?.videoOrientation = self.getVideoOrientation()
movieFileOutputConnection?.videoOrientation = self.orientation.getVideoOrientation() ?? previewOrientation
// Start recording to a temporary file.
let outputFileName = UUID().uuidString
let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mov")!)
movieFileOutput.startRecording(toOutputFileURL: URL(fileURLWithPath: outputFilePath), recordingDelegate: self)
movieFileOutput.startRecording(to: URL(fileURLWithPath: outputFilePath), recordingDelegate: self)
self.isVideoRecording = true
DispatchQueue.main.async {
self.cameraDelegate?.swiftyCam(self, didBeginRecordingVideo: self.currentCamera)
@ -555,11 +588,11 @@ open class SwiftyCamViewController: UIViewController {
print("[SwiftyCam]: Switching between cameras while recording video is not supported")
return
}
guard session.isRunning == true else {
return
}
switch currentCamera {
case .front:
currentCamera = .rear
@ -574,7 +607,7 @@ open class SwiftyCamViewController: UIViewController {
// remove and re-add inputs and outputs
for input in self.session.inputs {
self.session.removeInput(input as! AVCaptureInput)
self.session.removeInput(input )
}
self.addInputs()
@ -630,14 +663,13 @@ open class SwiftyCamViewController: UIViewController {
/// Configure image quality preset
fileprivate func configureVideoPreset() {
if currentCamera == .front {
session.sessionPreset = videoInputPresetFromVideoQuality(quality: .high)
session.sessionPreset = AVCaptureSession.Preset(rawValue: videoInputPresetFromVideoQuality(quality: .high))
} else {
if session.canSetSessionPreset(videoInputPresetFromVideoQuality(quality: videoQuality)) {
session.sessionPreset = videoInputPresetFromVideoQuality(quality: videoQuality)
if session.canSetSessionPreset(AVCaptureSession.Preset(rawValue: videoInputPresetFromVideoQuality(quality: videoQuality))) {
session.sessionPreset = AVCaptureSession.Preset(rawValue: videoInputPresetFromVideoQuality(quality: videoQuality))
} else {
session.sessionPreset = videoInputPresetFromVideoQuality(quality: .high)
session.sessionPreset = AVCaptureSession.Preset(rawValue: videoInputPresetFromVideoQuality(quality: .high))
}
}
}
@ -647,9 +679,9 @@ open class SwiftyCamViewController: UIViewController {
fileprivate func addVideoInput() {
switch currentCamera {
case .front:
videoDevice = SwiftyCamViewController.deviceWithMediaType(AVMediaTypeVideo, preferringPosition: .front)
videoDevice = SwiftyCamViewController.deviceWithMediaType(AVMediaType.video.rawValue, preferringPosition: .front)
case .rear:
videoDevice = SwiftyCamViewController.deviceWithMediaType(AVMediaTypeVideo, preferringPosition: .back)
videoDevice = SwiftyCamViewController.deviceWithMediaType(AVMediaType.video.rawValue, preferringPosition: .back)
}
if let device = videoDevice {
@ -681,18 +713,20 @@ open class SwiftyCamViewController: UIViewController {
}
do {
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
} else {
print("[SwiftyCam]: Could not add video device input to the session")
print(session.canSetSessionPreset(videoInputPresetFromVideoQuality(quality: videoQuality)))
setupResult = .configurationFailed
session.commitConfiguration()
return
}
if let videoDevice = videoDevice {
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
} else {
print("[SwiftyCam]: Could not add video device input to the session")
print(session.canSetSessionPreset(AVCaptureSession.Preset(rawValue: videoInputPresetFromVideoQuality(quality: videoQuality))))
setupResult = .configurationFailed
session.commitConfiguration()
return
}
}
} catch {
print("[SwiftyCam]: Could not create video device input: \(error)")
setupResult = .configurationFailed
@ -707,17 +741,19 @@ open class SwiftyCamViewController: UIViewController {
return
}
do {
let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioDeviceInput) {
session.addInput(audioDeviceInput)
}
else {
print("[SwiftyCam]: Could not add audio device input to the session")
}
}
catch {
if let audioDevice = AVCaptureDevice.default(for: AVMediaType.audio){
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioDeviceInput) {
session.addInput(audioDeviceInput)
} else {
print("[SwiftyCam]: Could not add audio device input to the session")
}
} else {
print("[SwiftyCam]: Could not find an audio device")
}
} catch {
print("[SwiftyCam]: Could not create audio device input: \(error)")
}
}
@ -729,7 +765,7 @@ open class SwiftyCamViewController: UIViewController {
if self.session.canAddOutput(movieFileOutput) {
self.session.addOutput(movieFileOutput)
if let connection = movieFileOutput.connection(withMediaType: AVMediaTypeVideo) {
if let connection = movieFileOutput.connection(with: AVMediaType.video) {
if connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .auto
}
@ -750,67 +786,6 @@ open class SwiftyCamViewController: UIViewController {
}
}
/// Orientation management
fileprivate func subscribeToDeviceOrientationChangeNotifications() {
self.deviceOrientation = UIDevice.current.orientation
NotificationCenter.default.addObserver(self, selector: #selector(deviceDidRotate), name: NSNotification.Name.UIDeviceOrientationDidChange, object: nil)
}
fileprivate func unsubscribeFromDeviceOrientationChangeNotifications() {
NotificationCenter.default.removeObserver(self, name: NSNotification.Name.UIDeviceOrientationDidChange, object: nil)
self.deviceOrientation = nil
}
@objc fileprivate func deviceDidRotate() {
if !UIDevice.current.orientation.isFlat {
self.deviceOrientation = UIDevice.current.orientation
}
}
fileprivate func getPreviewLayerOrientation() -> AVCaptureVideoOrientation {
// Depends on layout orientation, not device orientation
switch UIApplication.shared.statusBarOrientation {
case .portrait, .unknown:
return AVCaptureVideoOrientation.portrait
case .landscapeLeft:
return AVCaptureVideoOrientation.landscapeLeft
case .landscapeRight:
return AVCaptureVideoOrientation.landscapeRight
case .portraitUpsideDown:
return AVCaptureVideoOrientation.portraitUpsideDown
}
}
fileprivate func getVideoOrientation() -> AVCaptureVideoOrientation {
guard shouldUseDeviceOrientation, let deviceOrientation = self.deviceOrientation else { return previewLayer!.videoPreviewLayer.connection.videoOrientation }
switch deviceOrientation {
case .landscapeLeft:
return .landscapeRight
case .landscapeRight:
return .landscapeLeft
case .portraitUpsideDown:
return .portraitUpsideDown
default:
return .portrait
}
}
fileprivate func getImageOrientation(forCamera: CameraSelection) -> UIImageOrientation {
guard shouldUseDeviceOrientation, let deviceOrientation = self.deviceOrientation else { return forCamera == .rear ? .right : .leftMirrored }
switch deviceOrientation {
case .landscapeLeft:
return forCamera == .rear ? .up : .downMirrored
case .landscapeRight:
return forCamera == .rear ? .down : .upMirrored
case .portraitUpsideDown:
return forCamera == .rear ? .left : .rightMirrored
default:
return forCamera == .rear ? .right : .leftMirrored
}
}
/**
Returns a UIImage from Image Data.
@ -827,17 +802,23 @@ open class SwiftyCamViewController: UIViewController {
// Set proper orientation for photo
// If camera is currently set to front camera, flip image
let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: self.getImageOrientation(forCamera: self.currentCamera))
let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: self.orientation.getImageOrientation(forCamera: self.currentCamera))
return image
}
fileprivate func capturePhotoAsyncronously(completionHandler: @escaping(Bool) -> ()) {
if let videoConnection = photoFileOutput?.connection(withMediaType: AVMediaTypeVideo) {
guard sessionRunning == true else {
print("[SwiftyCam]: Cannot take photo. Capture session is not running")
return
}
if let videoConnection = photoFileOutput?.connection(with: AVMediaType.video) {
photoFileOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: {(sampleBuffer, error) in
if (sampleBuffer != nil) {
let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer!)
let image = self.processPhoto(imageData!)
// Call delegate and return new image
@ -865,9 +846,9 @@ open class SwiftyCamViewController: UIViewController {
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil))
alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"), style: .default, handler: { action in
if #available(iOS 10.0, *) {
UIApplication.shared.openURL(URL(string: UIApplicationOpenSettingsURLString)!)
UIApplication.shared.openURL(URL(string: UIApplication.openSettingsURLString)!)
} else {
if let appSettings = URL(string: UIApplicationOpenSettingsURLString) {
if let appSettings = URL(string: UIApplication.openSettingsURLString) {
UIApplication.shared.openURL(appSettings)
}
}
@ -886,38 +867,54 @@ open class SwiftyCamViewController: UIViewController {
fileprivate func videoInputPresetFromVideoQuality(quality: VideoQuality) -> String {
switch quality {
case .high: return AVCaptureSessionPresetHigh
case .medium: return AVCaptureSessionPresetMedium
case .low: return AVCaptureSessionPresetLow
case .resolution352x288: return AVCaptureSessionPreset352x288
case .resolution640x480: return AVCaptureSessionPreset640x480
case .resolution1280x720: return AVCaptureSessionPreset1280x720
case .resolution1920x1080: return AVCaptureSessionPreset1920x1080
case .iframe960x540: return AVCaptureSessionPresetiFrame960x540
case .iframe1280x720: return AVCaptureSessionPresetiFrame1280x720
case .high: return AVCaptureSession.Preset.high.rawValue
case .medium: return AVCaptureSession.Preset.medium.rawValue
case .low: return AVCaptureSession.Preset.low.rawValue
case .resolution352x288: return AVCaptureSession.Preset.cif352x288.rawValue
case .resolution640x480: return AVCaptureSession.Preset.vga640x480.rawValue
case .resolution1280x720: return AVCaptureSession.Preset.hd1280x720.rawValue
case .resolution1920x1080: return AVCaptureSession.Preset.hd1920x1080.rawValue
case .iframe960x540: return AVCaptureSession.Preset.iFrame960x540.rawValue
case .iframe1280x720: return AVCaptureSession.Preset.iFrame1280x720.rawValue
case .resolution3840x2160:
if #available(iOS 9.0, *) {
return AVCaptureSessionPreset3840x2160
return AVCaptureSession.Preset.hd4K3840x2160.rawValue
}
else {
print("[SwiftyCam]: Resolution 3840x2160 not supported")
return AVCaptureSessionPresetHigh
return AVCaptureSession.Preset.high.rawValue
}
}
}
/// Get Devices
fileprivate class func deviceWithMediaType(_ mediaType: String, preferringPosition position: AVCaptureDevicePosition) -> AVCaptureDevice? {
if let devices = AVCaptureDevice.devices(withMediaType: mediaType) as? [AVCaptureDevice] {
return devices.filter({ $0.position == position }).first
fileprivate class func deviceWithMediaType(_ mediaType: String, preferringPosition position: AVCaptureDevice.Position) -> AVCaptureDevice? {
if #available(iOS 10.0, *) {
let avDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType(rawValue: mediaType), position: position)
return avDevice
} else {
// Fallback on earlier versions
let avDevice = AVCaptureDevice.devices(for: AVMediaType(rawValue: mediaType))
var avDeviceNum = 0
for device in avDevice {
print("deviceWithMediaType Position: \(device.position.rawValue)")
if device.position == position {
break
} else {
avDeviceNum += 1
}
}
return avDevice[avDeviceNum]
}
return nil
//return AVCaptureDevice.devices(for: AVMediaType(rawValue: mediaType), position: position).first
}
/// Enable or disable flash for photo
fileprivate func changeFlashSettings(device: AVCaptureDevice, mode: AVCaptureFlashMode) {
fileprivate func changeFlashSettings(device: AVCaptureDevice, mode: AVCaptureDevice.FlashMode) {
do {
try device.lockForConfiguration()
device.flashMode = mode
@ -951,17 +948,17 @@ open class SwiftyCamViewController: UIViewController {
return
}
let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let device = AVCaptureDevice.default(for: AVMediaType.video)
// Check if device has a flash
if (device?.hasTorch)! {
do {
try device?.lockForConfiguration()
if (device?.torchMode == AVCaptureTorchMode.on) {
device?.torchMode = AVCaptureTorchMode.off
if (device?.torchMode == AVCaptureDevice.TorchMode.on) {
device?.torchMode = AVCaptureDevice.TorchMode.off
self.isCameraTorchOn = false
} else {
do {
try device?.setTorchModeOnWithLevel(1.0)
try device?.setTorchModeOn(level: 1.0)
self.isCameraTorchOn = true
} catch {
print("[SwiftyCam]: \(error)")
@ -980,15 +977,21 @@ open class SwiftyCamViewController: UIViewController {
guard allowBackgroundAudio == true else {
return
}
guard audioEnabled == true else {
return
}
do{
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord,
with: [.duckOthers, .defaultToSpeaker])
if #available(iOS 10.0, *) {
try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: [.mixWithOthers, .allowBluetooth, .allowAirPlay, .allowBluetoothA2DP])
} else {
let options: [AVAudioSession.CategoryOptions] = [.mixWithOthers, .allowBluetooth]
let category = AVAudioSession.Category.playAndRecord
let selector = NSSelectorFromString("setCategory:withOptions:error:")
AVAudioSession.sharedInstance().perform(selector, with: category, with: options)
}
try AVAudioSession.sharedInstance().setActive(true)
session.automaticallyConfiguresApplicationAudioSession = false
}
catch {
@ -996,6 +999,24 @@ open class SwiftyCamViewController: UIViewController {
}
}
/// Called when Notification Center registers session starts running
@objc private func captureSessionDidStartRunning() {
sessionRunning = true
DispatchQueue.main.async {
self.cameraDelegate?.swiftyCamSessionDidStartRunning(self)
}
}
/// Called when Notification Center registers session stops running
@objc private func captureSessionDidStopRunning() {
sessionRunning = false
DispatchQueue.main.async {
self.cameraDelegate?.swiftyCamSessionDidStopRunning(self)
}
}
}
extension SwiftyCamViewController : SwiftyCamButtonDelegate {
@ -1038,23 +1059,27 @@ extension SwiftyCamViewController : AVCaptureFileOutputRecordingDelegate {
/// Process newly captured video and write it to temporary directory
public func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
if let currentBackgroundRecordingID = backgroundRecordingID {
backgroundRecordingID = UIBackgroundTaskInvalid
public func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if let currentBackgroundRecordingID = backgroundRecordingID {
backgroundRecordingID = UIBackgroundTaskIdentifier.invalid
if currentBackgroundRecordingID != UIBackgroundTaskInvalid {
UIApplication.shared.endBackgroundTask(currentBackgroundRecordingID)
}
}
if error != nil {
print("[SwiftyCam]: Movie file finishing error: \(error)")
} else {
//Call delegate function with the URL of the outputfile
DispatchQueue.main.async {
self.cameraDelegate?.swiftyCam(self, didFinishProcessVideoAt: outputFileURL)
}
}
}
if currentBackgroundRecordingID != UIBackgroundTaskIdentifier.invalid {
UIApplication.shared.endBackgroundTask(currentBackgroundRecordingID)
}
}
if let currentError = error {
print("[SwiftyCam]: Movie file finishing error: \(currentError)")
DispatchQueue.main.async {
self.cameraDelegate?.swiftyCam(self, didFailToRecordVideo: currentError)
}
} else {
//Call delegate function with the URL of the outputfile
DispatchQueue.main.async {
self.cameraDelegate?.swiftyCam(self, didFinishProcessVideoAt: outputFileURL)
}
}
}
}
// Mark: UIGestureRecognizer Declarations
@ -1065,11 +1090,11 @@ extension SwiftyCamViewController {
@objc fileprivate func zoomGesture(pinch: UIPinchGestureRecognizer) {
guard pinchToZoom == true && self.currentCamera == .rear else {
//ignore pinch
//ignore pinch
return
}
do {
let captureDevice = AVCaptureDevice.devices().first as? AVCaptureDevice
let captureDevice = AVCaptureDevice.devices().first
try captureDevice?.lockForConfiguration()
zoomScale = min(maxZoomScale, max(1.0, min(beginZoomScale * pinch.scale, captureDevice!.activeFormat.videoMaxZoomFactor)))
@ -1111,7 +1136,7 @@ extension SwiftyCamViewController {
device.focusMode = .autoFocus
}
device.exposurePointOfInterest = focusPoint
device.exposureMode = AVCaptureExposureMode.continuousAutoExposure
device.exposureMode = AVCaptureDevice.ExposureMode.continuousAutoExposure
device.unlockForConfiguration()
//Call delegate function and pass in the location of the touch
@ -1133,42 +1158,42 @@ extension SwiftyCamViewController {
}
switchCamera()
}
@objc private func panGesture(pan: UIPanGestureRecognizer) {
guard swipeToZoom == true && self.currentCamera == .rear else {
//ignore pan
return
}
let currentTranslation = pan.translation(in: view).y
let translationDifference = currentTranslation - previousPanTranslation
do {
let captureDevice = AVCaptureDevice.devices().first as? AVCaptureDevice
let captureDevice = AVCaptureDevice.devices().first
try captureDevice?.lockForConfiguration()
let currentZoom = captureDevice?.videoZoomFactor ?? 0.0
if swipeToZoomInverted == true {
zoomScale = min(maxZoomScale, max(1.0, min(currentZoom - (translationDifference / 75), captureDevice!.activeFormat.videoMaxZoomFactor)))
} else {
zoomScale = min(maxZoomScale, max(1.0, min(currentZoom + (translationDifference / 75), captureDevice!.activeFormat.videoMaxZoomFactor)))
}
captureDevice?.videoZoomFactor = zoomScale
// Call Delegate function with current zoom scale
DispatchQueue.main.async {
self.cameraDelegate?.swiftyCam(self, didChangeZoomLevel: self.zoomScale)
}
captureDevice?.unlockForConfiguration()
} catch {
print("[SwiftyCam]: Error locking configuration")
}
if pan.state == .ended || pan.state == .failed || pan.state == .cancelled {
previousPanTranslation = 0.0
} else {
@ -1197,7 +1222,7 @@ extension SwiftyCamViewController {
doubleTapGesture.numberOfTapsRequired = 2
doubleTapGesture.delegate = self
previewLayer.addGestureRecognizer(doubleTapGesture)
panGesture = UIPanGestureRecognizer(target: self, action: #selector(panGesture(pan:)))
panGesture.delegate = self
previewLayer.addGestureRecognizer(panGesture)
@ -1218,7 +1243,3 @@ extension SwiftyCamViewController : UIGestureRecognizerDelegate {
return true
}
}

View file

@ -15,6 +15,7 @@
import UIKit
import AVFoundation
// MARK: Public Protocol Declaration
@ -22,6 +23,24 @@ import UIKit
public protocol SwiftyCamViewControllerDelegate: class {
/**
SwiftyCamViewControllerDelegate function called when the SwiftyCamViewController session starts running.
Photos and video capture will be enabled.
- Parameter swiftyCam: Current SwiftyCamViewController
*/
func swiftyCamSessionDidStartRunning(_ swiftyCam: SwiftyCamViewController)
/**
SwiftyCamViewControllerDelegate function called when the SwiftyCamViewController session stops running.
Photos and video capture will be disabled.
- Parameter swiftyCam: Current SwiftyCamViewController
*/
func swiftyCamSessionDidStopRunning(_ swiftyCam: SwiftyCamViewController)
/**
SwiftyCamViewControllerDelegate function called when the takePhoto() function is called.
@ -58,6 +77,15 @@ public protocol SwiftyCamViewControllerDelegate: class {
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishProcessVideoAt url: URL)
/**
SwiftyCamViewControllerDelegate function called when SwiftyCamViewController fails to record a video.
- Parameter swiftyCam: Current SwiftyCamViewController session
- Parameter error: An error object that describes the problem
*/
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFailToRecordVideo error: Error)
/**
SwiftyCamViewControllerDelegate function called when SwiftyCamViewController switches between front or rear camera.
@ -65,6 +93,7 @@ public protocol SwiftyCamViewControllerDelegate: class {
- Parameter camera: Current camera selection
*/
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didSwitchCameras camera: SwiftyCamViewController.CameraSelection)
/**
@ -85,10 +114,34 @@ public protocol SwiftyCamViewControllerDelegate: class {
*/
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didChangeZoomLevel zoom: CGFloat)
/**
SwiftyCamViewControllerDelegate function called when SwiftyCamViewController fails to configure the session.
- Parameter swiftyCam: Current SwiftyCamViewController
*/
func swiftyCamDidFailToConfigure(_ swiftyCam: SwiftyCamViewController)
/**
SwiftyCamViewControllerDelegate function called when SwiftyCamViewController does not have access to the camera or microphone.
- Parameter swiftyCam: Current SwiftyCamViewController
*/
func swiftyCamNotAuthorized(_ swiftyCam: SwiftyCamViewController)
}
public extension SwiftyCamViewControllerDelegate {
func swiftyCamSessionDidStopRunning(_ swiftyCam: SwiftyCamViewController) {
// Optional
}
func swiftyCamSessionDidStartRunning(_ swiftyCam: SwiftyCamViewController) {
// Optional
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didTake photo: UIImage) {
// Optional
}
@ -107,7 +160,10 @@ public extension SwiftyCamViewControllerDelegate {
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishProcessVideoAt url: URL) {
// Optional
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFailToRecordVideo error: Error) {
// Optional
}
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didSwitchCameras camera: SwiftyCamViewController.CameraSelection) {
// Optional
@ -122,6 +178,14 @@ public extension SwiftyCamViewControllerDelegate {
func swiftyCam(_ swiftyCam: SwiftyCamViewController, didChangeZoomLevel zoom: CGFloat) {
// Optional
}
func swiftyCamDidFailToConfigure(_ swiftyCam: SwiftyCamViewController) {
// Optional
}
func swiftyCamNotAuthorized(_ swiftyCam: SwiftyCamViewController) {
// Optional
}
}

View file

@ -8,16 +8,10 @@
Pod::Spec.new do |s|
s.name = 'SwiftyCam'
s.version = '2.4.0'
s.version = '3.0.0'
s.summary = 'A Simple, Snapchat inspired camera Framework written in Swift'
s.ios.deployment_target = '8.0'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.swift_version = '4.2'
s.description = <<-DESC
A drop in Camera View Controller for capturing photos and videos from one AVSession. Written in Swift.
@ -33,12 +27,4 @@ A drop in Camera View Controller for capturing photos and videos from one AVSess
s.ios.deployment_target = '8.0'
s.source_files = 'Source/**/*'
# s.resource_bundles = {
# 'SwiftyCam' => ['SwiftyCam/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end