Reputation:
I'm new to Swift and I don't have a lot of programming experience, so maybe this problem is simple for you to solve O:-)...
I only need a simple Camera with AVFoundation to capture pictures, no live mode or video recording is needed. I have a problem with the video orientation and wasn't able to find the answer in the forum by now. I know that many people have this problem, but all I tried did not work and I am not able to solve this problem in my code. I don't really understand how capturing pictures with AVFoundation works and where exactly the picture is taken...
Can you tell me how to set the device image orientation to the picture so that it will be shown with the device orientation when I use the picture in an image view?
Here is my code:
import UIKit
import AVFoundation
import Photos
class ViewController: UIViewController {
@IBOutlet weak var cameraView: UIView!
@IBOutlet weak var imageProcessedView: UIImageView!
var imageProcessed: UIImage?
let captureSession = AVCaptureSession()
var captureDevice: AVCaptureDevice?
var previewLayer: AVCaptureVideoPreviewLayer?
var stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
/// Initial setup: enables device-orientation tracking, configures the photo
/// session preset, and attaches the back camera input.
override func viewDidLoad() {
super.viewDidLoad()
// Without this, UIDevice.current.orientation stays .unknown later on.
UIDevice.current.beginGeneratingDeviceOrientationNotifications()
// The processed-image view starts fully visible; takePhoto hides it.
imageProcessedView.alpha = 1.0
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
backCamera()
}
/// Attaches the first video-capable capture device to the session.
/// NOTE(review): the first video device is typically the back camera, but this
/// does not filter by AVCaptureDevicePosition — confirm if front/back matters.
func backCamera()
{
    let devices = AVCaptureDevice.devices()
    for device in devices! {
        if ((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
            captureDevice = device as? AVCaptureDevice
            do {
                let input = try AVCaptureDeviceInput(device: captureDevice)
                // BUG FIX: ask the session first instead of adding
                // unconditionally — adding a rejected or duplicate input
                // raises an exception.
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                } else {
                    print("Could not add camera input to the session")
                }
            } catch {
                // BUG FIX: surface the actual error instead of a bare "error".
                print("Error creating camera input: \(error)")
            }
            break
        }
    }
}
//Session starts and preview appears:
/// Starts the live camera preview inside cameraView and prepares the
/// still-image (JPEG) output for a later capturePicture call.
@IBAction func takePhoto(_ sender: Any) {
    if captureDevice != nil
    {
        imageProcessedView.alpha = 0.0
        // BUG FIX: remove any preview layer left over from a previous tap so
        // repeated taps don't stack sublayers on cameraView.
        previewLayer?.removeFromSuperlayer()
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.cameraView.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.cameraView.layer.bounds
        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        // BUG FIX: configure and attach the output BEFORE the session starts
        // running, not after; canAddOutput also guards against re-adding.
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput){
            captureSession.addOutput(stillImageOutput)
        }
        captureSession.startRunning()
    }else{
        print("No captureDevice")
    }
}
/// Captures a still JPEG, tagging the capture connection with the current
/// device orientation so the resulting image displays upright.
@IBAction func capturePicture(_ sender: Any) {
    if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo){
        // Map the device orientation to the capture orientation. The two
        // landscape cases are mirrored between the enums: device
        // .landscapeLeft corresponds to video .landscapeRight and vice versa.
        var avCaptureOrientation: AVCaptureVideoOrientation
        switch UIDevice.current.orientation {
        case .portrait:
            avCaptureOrientation = .portrait
            print("Device: Portrait")
        case .landscapeLeft:
            avCaptureOrientation = .landscapeRight
            print("Device: LandscapeLeft")
        case .landscapeRight:
            avCaptureOrientation = .landscapeLeft
            print("Device LandscapeRight")
        case .portraitUpsideDown:
            avCaptureOrientation = .portraitUpsideDown
            print("Device PortraitUpsideDown")
        default:
            avCaptureOrientation = .portrait
            print("Unknown Device Orientation")
        }
        // BUG FIX: the original computed the orientation but never used it.
        // Applying it to the connection BEFORE capturing is what makes the
        // JPEG's orientation metadata match how the device was held.
        videoConnection.videoOrientation = avCaptureOrientation
        stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: {(imageDataSampleBuffer, error) in
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
            let image = UIImage(data: imageData!)
            self.imageProcessed = image!
            // Logs the EXIF-derived orientation of the decoded image.
            print("Metadata Orientation: \(image!.imageOrientation.rawValue)")
            self.cameraView.backgroundColor = UIColor(patternImage: image!)
            self.captureSession.stopRunning()
        })
    }
}
Thanks a lot!!!
Marina
Ok now I changed my code a little. I can see the correct DeviceOrientation and also the imageOrientation seems to be correct, as it is shown in the print command.
But I still have the problem that the picture is oriented wrong when I put it into the imageView (it is also scaled way too big).
The function where I want to set the picture into the image view is:
/// Replaces the live preview with the captured image, first re-rendering it
/// so its EXIF orientation flag is baked into the pixels (orientation .up).
@IBAction func showCirclesInPic(_ sender: Any) {
if imageProcessed != nil {
// Tear down the live preview and show the image view instead.
previewLayer!.removeFromSuperlayer()
cameraView.addSubview(imageProcessedView)
if (imageProcessed!.imageOrientation != .up){
UIGraphicsBeginImageContextWithOptions(imageProcessed!.size, false, imageProcessed!.scale)
// draw(in:) honors imageOrientation, producing an upright bitmap.
// NOTE(review): `size` is the orientation-adjusted size — verify width and
// height are not swapped for landscape captures.
imageProcessed!.draw(in: CGRect(x:0, y:0, width: imageProcessed!.size.width, height: imageProcessed!.size.height))
imageProcessed = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
}
// Aspect-fit avoids the "scaled way too big" effect inside the image view.
imageProcessedView.contentMode = .scaleAspectFit
imageProcessedView.image = imageProcessed
//print("ImageProcessed: \(ImageProcessingCV.showCircles(imageProcessed)!)")
}
}
Here is the rest of my changed Code:
import UIKit
import AVFoundation
import Photos
/// Camera screen (second revision from the question): live preview in
/// cameraView, still-JPEG capture via the pre-iOS-10
/// AVCaptureStillImageOutput API, and display of the processed result.
class ViewController: UIViewController {
@IBOutlet weak var cameraView: UIView!
@IBOutlet weak var imageProcessedView: UIImageView!
// Most recently captured photo; nil until capturePicture has run.
var imageProcessed: UIImage?
let captureSession = AVCaptureSession()
var captureDevice: AVCaptureDevice?
var previewLayer: AVCaptureVideoPreviewLayer?
var stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
/// Configures the session preset and attaches the camera input.
override func viewDidLoad() {
super.viewDidLoad()
imageProcessedView.alpha = 1.0
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
backCamera()
}
/// Attaches the first video-capable device to the session.
/// NOTE(review): no position filter — assumes the first video device is the
/// back camera; confirm on multi-camera devices.
func backCamera()
{
let devices = AVCaptureDevice.devices()
for device in devices! {
if ((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
captureDevice = device as? AVCaptureDevice
do {
// NOTE(review): addInput is called without a canAddInput check;
// adding a rejected or duplicate input raises an exception.
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
} catch {
print("error")
}
break
}
}
}
/// Starts the live preview and prepares the JPEG still-image output.
@IBAction func takePhoto(_ sender: Any) {
if captureDevice != nil
{
imageProcessedView.alpha = 0.0
// NOTE(review): a new preview layer is added on every tap without
// removing the previous one — repeated taps stack sublayers.
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.cameraView.layer.addSublayer(previewLayer!)
previewLayer?.frame = self.cameraView.layer.bounds
previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
captureSession.startRunning()
// NOTE(review): output is configured/attached after startRunning;
// conventionally this happens before the session starts.
stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if captureSession.canAddOutput(stillImageOutput){
captureSession.addOutput(stillImageOutput)
}
}else{
print("No captureDevice")
}
}
/// Captures a still JPEG and rebuilds it as a UIImage tagged with an
/// orientation derived from the current device orientation.
@IBAction func capturePicture(_ sender: Any) {
if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo){
var currentDevice: UIDevice
currentDevice = .current
// NOTE(review): enabling orientation notifications here is too late for
// this very read — orientation may still be .unknown on the first tap;
// better done once at startup.
UIDevice.current.beginGeneratingDeviceOrientationNotifications()
var deviceOrientation: UIDeviceOrientation
deviceOrientation = currentDevice.orientation
var imageOrientation: UIImageOrientation?
// NOTE(review): this maps device orientation 1:1 onto UIImageOrientation.
// The back-camera sensor is landscape-native, so the conventional mapping
// is portrait → .right, landscapeLeft → .up, landscapeRight → .down,
// portraitUpsideDown → .left — verify against actual captures.
if deviceOrientation == .portrait {
imageOrientation = .up
print("Device: Portrait")
}else if (deviceOrientation == .landscapeLeft){
imageOrientation = .left
print("Device: LandscapeLeft")
}else if (deviceOrientation == .landscapeRight){
imageOrientation = .right
print("Device LandscapeRight")
}else if (deviceOrientation == .portraitUpsideDown){
imageOrientation = .down
print("Device PortraitUpsideDown")
}else{
// .faceUp, .faceDown and .unknown fall through here, leaving
// imageOrientation nil — force-unwrapped below (crash risk).
print("Unknown Device Orientation")
}
stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: {(imageDataSampleBuffer, error) in
let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
let dataProvider = CGDataProvider(data: imageData! as CFData)
let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent )
// Rebuilds the image from raw JPEG bytes and stamps the orientation
// chosen above; the pixel data itself is not rotated.
self.imageProcessed = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: imageOrientation!)
//print("Image Orientation: \(self.imageProcessed?.imageOrientation.rawValue)")
print("ImageO: \(imageOrientation!.rawValue)")
self.cameraView.backgroundColor = UIColor(patternImage: self.imageProcessed!)
self.captureSession.stopRunning()
})
}
}
/// Tap-to-focus: converts the tap point into camera coordinates and
/// triggers a one-shot focus/exposure adjustment.
@IBAction func focusAndExposeTap(_ gestureRecognizer: UITapGestureRecognizer) {
let devicePoint = self.previewLayer?.captureDevicePointOfInterest(for: gestureRecognizer.location(in: gestureRecognizer.view))
focus(with: .autoFocus, exposureMode: .autoExpose, at: devicePoint!, monitorSubjectAreaChange: true)
}
/// Locks the device configuration and applies focus/exposure point and mode
/// where the hardware supports them.
private func focus(with focusMode: AVCaptureFocusMode, exposureMode: AVCaptureExposureMode, at devicePoint: CGPoint, monitorSubjectAreaChange: Bool)
{
if let device = captureDevice
{
do{
try device.lockForConfiguration()
if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode)
{
device.focusPointOfInterest = devicePoint
device.focusMode = focusMode
}
if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode)
{
device.exposurePointOfInterest = devicePoint
device.exposureMode = exposureMode
}
device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
device.unlockForConfiguration()
}catch{
print("Could not lock device for configuration: \(error)")
}
}
}
/// Swaps the preview for the captured image after baking the orientation
/// flag into the pixels (redraw to .up).
@IBAction func showCirclesInPic(_ sender: Any) {
if imageProcessed != nil {
previewLayer!.removeFromSuperlayer()
cameraView.addSubview(imageProcessedView)
if (imageProcessed!.imageOrientation != .up){
UIGraphicsBeginImageContextWithOptions(imageProcessed!.size, false, imageProcessed!.scale)
// draw(in:) honors imageOrientation, producing an upright bitmap.
imageProcessed!.draw(in: CGRect(x:0, y:0, width: imageProcessed!.size.width, height: imageProcessed!.size.height))
imageProcessed = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
}
imageProcessedView.contentMode = .scaleAspectFit
imageProcessedView.image = imageProcessed
//print("ImageProcessed: \(ImageProcessingCV.showCircles(imageProcessed)!)")
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
Upvotes: 1
Views: 1197
Reputation: 238
In order to change the orientation of the camera output while taking picture you should add a listener on the orientation changes using NotificationCenter. I will be attaching a sample code for a camera using AVFoundation that you may use.
Please note that the capture and flip-camera buttons are added programmatically. Moreover, you can set an image for the flip-camera button (there is a comment telling you where to set it).
Code Fixed
//
// CameraViewController.swift
// MatchFriend
//
// Created by Tarek Ezzat Abdallah on 10/1/16.
// Copyright © 2016 Tarek. All rights reserved.
//
import UIKit
import AVFoundation
/// Simple AVFoundation camera screen: a live preview that follows device
/// rotation, a programmatic capture button, and a front/back flip button.
/// Uses the pre-iOS-10 AVCaptureStillImageOutput API.
class CameraViewController: UIViewController{

    // MARK: - Capture state
    var captureSession: AVCaptureSession?
    var previewLayer : AVCaptureVideoPreviewLayer?
    var stillImageOutput: AVCaptureStillImageOutput?
    /// The most recently captured photo; assigned by capture(_:).
    var imageToSend: UIImage!
    // If we find a device we'll store it here for later use
    var captureDevice : AVCaptureDevice?

    // MARK: - UI
    @IBOutlet weak var cameraView: UIView!
    var captureButton: UIButton!
    var flipCam:UIButton!
    /// Active camera position: "back" or "front".
    var camPos:String = "back"

    /// Captures a still JPEG from the current video connection and stores the
    /// result in `imageToSend`.
    func capture(_ sender: AnyObject)
    {
        if let videoConnection = stillImageOutput?.connection(withMediaType: AVMediaTypeVideo)
        {
            stillImageOutput?.captureStillImageAsynchronously(from: videoConnection)
            {
                (imageDataSampleBuffer, error) -> Void in
                // BUG FIX: unwrap safely instead of `imageData as! CFData`,
                // which crashed when the capture produced no data.
                guard let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer) else {
                    print("Could not get JPEG data: \(String(describing: error))")
                    return
                }
                let dataProvider = CGDataProvider(data: imageData as CFData)
                let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent )
                // .right corresponds to a portrait capture from the
                // landscape-native sensor.
                let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.right)
                // BUG FIX: the original built `image` and then discarded it;
                // store it so callers can actually use the photo.
                self.imageToSend = image
            }
        }
    }

    /// Builds the programmatic UI, subscribes to rotation notifications, and
    /// starts the back camera. Button frames are finalized in
    /// viewDidLayoutSubviews.
    override func viewDidLoad()
    {
        super.viewDidLoad()
        cameraView.backgroundColor = .clear
        self.view.backgroundColor = .clear
        captureButton = UIButton(frame: CGRect(x: 160, y: 580, width: 80, height: 80))
        captureButton.addTarget(self, action: #selector(CameraViewController.capture(_:)), for: UIControlEvents.touchUpInside)
        flipCam = UIButton(frame: CGRect(x: 0, y: 0, width: 40, height: 40))
        flipCam.addTarget(self, action: #selector(CameraViewController.flip), for: UIControlEvents.touchUpInside)
        self.view.addSubview(captureButton)
        self.view.addSubview(flipCam)
        captureButton.backgroundColor = UIColor.red
        // Re-orient the preview whenever the device rotates.
        NotificationCenter.default.addObserver(self, selector: #selector(CameraViewController.rotated), name: NSNotification.Name.UIDeviceOrientationDidChange, object: nil)
        camStart(camPos)
        setViews()
    }

    /// (Re)builds the entire capture pipeline for the requested camera.
    /// - Parameter position: "back" or "front".
    func camStart(_ position: String)
    {
        captureSession = AVCaptureSession()
        captureSession?.sessionPreset = AVCaptureSessionPresetHigh
        let videoDevices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        // Fall back to the system default if no device matches the position.
        var capDevice: AVCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        let wantedPosition: AVCaptureDevicePosition = (position == "back") ? .back : .front
        for device in videoDevices! {
            let device = device as! AVCaptureDevice
            if device.position == wantedPosition {
                capDevice = device
                break
            }
        }
        let input = try? AVCaptureDeviceInput(device: capDevice)
        // BUG FIX: the original tested `canAddInput(input) != nil`, which is
        // true even when the session REJECTS the input (an Optional Bool is
        // non-nil either way); compare the wrapped Bool to `true` instead.
        if captureSession?.canAddInput(input) == true {
            captureSession?.addInput(input)
            stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput?.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
            // BUG FIX: same `!= nil` trap as above for the output.
            if captureSession?.canAddOutput(stillImageOutput) == true {
                captureSession?.addOutput(stillImageOutput)
                previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
                // Start upright; rotated() keeps it in sync afterwards.
                previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                previewLayer!.frame = self.view.layer.frame
                previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
                cameraView.layer.addSublayer(previewLayer!)
                captureSession?.startRunning()
            }
        }
    }

    /// Positions the programmatic buttons relative to the current view size
    /// and keeps the preview layer filling the view.
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        captureButton.frame = CGRect(x: self.view.frame.width/2 - 40, y: self.view.frame.height - 70 - 40, width: 80, height: 80)
        flipCam.frame = CGRect(x: self.view.frame.width/6 - 40, y: self.view.frame.height - 60 - 40, width: 80, height: 80)
        //add the image to flip camera
        //flipCam.setImage(UIImage(named: "switchCam"), for: UIControlState.normal)
        flipCam.setTitle("flip", for: .normal)
        if previewLayer != nil{
            previewLayer!.frame = self.view.frame
        }
    }

    override func viewWillAppear(_ animated: Bool)
    {
        super.viewWillAppear(true)
        setViews()
    }

    override func didReceiveMemoryWarning()
    {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Styles the round capture button.
    func setViews()
    {
        captureButton.layer.cornerRadius = captureButton.layer.bounds.width/2
        captureButton.layer.borderWidth = 1
    }

    /// Locks the device configuration and pins the focus mode.
    func configureDevice()
    {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
                device.focusMode = .locked
                device.unlockForConfiguration()
            } catch {
                // BUG FIX: the original wrote the error into a nil
                // NSErrorPointer (a silent no-op) and mutated the device even
                // when the lock had failed.
                print("Could not lock device for configuration: \(error)")
            }
        }
    }

    /// Toggles between the front and back cameras and rebuilds the session.
    func flip()
    {
        captureSession?.stopRunning()
        camPos = (camPos == "back") ? "front" : "back"
        camStart(camPos)
    }

    /// Keeps the preview upright as the device rotates. Note the landscape
    /// cases are mirrored: device .landscapeLeft maps to video
    /// .landscapeRight and vice versa.
    func rotated() {
        if UIDeviceOrientationIsLandscape(UIDevice.current.orientation) {
            if UIDevice.current.orientation == UIDeviceOrientation.landscapeLeft{
                previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.landscapeRight
            }
            else
            {
                previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft
            }
        }
        if UIDeviceOrientationIsPortrait(UIDevice.current.orientation) {
            if UIDevice.current.orientation == UIDeviceOrientation.portrait{
                previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
            }
            else
            {
                previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portraitUpsideDown
            }
        }
    }
}
Upvotes: 1