Reputation: 173
I use this code to rotate my image and then send it to Azure Storage.
My code:
func imageRotatedByDegrees(oldImage: UIImage, deg degrees: CGFloat) -> UIImage {
    // Calculate the size of the rotated view's containing box for our drawing space
    let rotatedViewBox: UIView = UIView(frame: CGRect(x: 0, y: 0, width: oldImage.size.width, height: oldImage.size.height))
    let t: CGAffineTransform = CGAffineTransform(rotationAngle: degrees * CGFloat.pi / 180)
    rotatedViewBox.transform = t
    let rotatedSize: CGSize = rotatedViewBox.frame.size

    // Create the bitmap context
    UIGraphicsBeginImageContext(rotatedSize)
    let bitmap: CGContext = UIGraphicsGetCurrentContext()!

    // Move the origin to the middle of the image so we rotate and scale around the center
    bitmap.translateBy(x: rotatedSize.width / 2, y: rotatedSize.height / 2)

    // Rotate the image context
    bitmap.rotate(by: degrees * CGFloat.pi / 180)

    // Now, draw the rotated/scaled image into the context
    bitmap.scaleBy(x: 1.0, y: -1.0)
    bitmap.draw(oldImage.cgImage!, in: CGRect(x: -oldImage.size.width / 2, y: -oldImage.size.height / 2, width: oldImage.size.width, height: oldImage.size.height))

    let newImage: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()
    return newImage
}
But this code shrinks and crops my image. Can someone help me with this problem?
Upvotes: 0
Views: 1240
Reputation: 16774
I will paste here full code for every possible rotation of the image. For 90-degree rotations it is a bit simpler: there is no need for a background color, and you can simply initialize an image view with the image, apply the transform to it, and return its snapshot.
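For the quarter-turn case, a minimal sketch of that idea could look like the following. This is only an illustration of the shortcut, not part of the code further below; the function name is hypothetical, and a plain container view is used for the snapshot:

// Minimal sketch of the 90-degree shortcut, assuming a single quarter turn:
// the rotated image fills the canvas exactly, so no background color is needed.
func rotatedByQuarterTurn(image: UIImage, clockwise: Bool) -> UIImage? {
    let angle: CGFloat = clockwise ? .pi / 2 : -.pi / 2
    let imageView = UIImageView(image: image)
    imageView.transform = CGAffineTransform(rotationAngle: angle)

    // After a quarter turn, width and height simply swap.
    let canvas = UIView(frame: CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width))
    canvas.addSubview(imageView)
    imageView.center = CGPoint(x: canvas.bounds.midX, y: canvas.bounds.midY)

    UIGraphicsBeginImageContext(canvas.bounds.size)
    defer { UIGraphicsEndImageContext() }
    guard let context = UIGraphicsGetCurrentContext() else { return nil }
    canvas.layer.render(in: context)
    return UIGraphicsGetImageFromCurrentImageContext()
}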
The code for any rotation:
import UIKit

class ImageTools {

    static func rotatedImage(image: UIImage?, byDegrees degrees: CGFloat, backgroundColor: UIColor? = nil) -> UIImage? {
        guard let image = image else {
            return nil
        }
        let angle = degrees * CGFloat.pi / 180.0
        let imageView = UIImageView(image: image)
        let transform = CGAffineTransform(rotationAngle: angle)

        // Size of the axis-aligned bounding box of the rotated image.
        let transformedSize: CGSize = {
            let leftFromCenter = CGPoint(x: -imageView.frame.size.width * 0.5, y: imageView.frame.size.height * 0.5)
            let rightFromCenter = CGPoint(x: imageView.frame.size.width * 0.5, y: imageView.frame.size.height * 0.5)
            let leftTransformed = leftFromCenter.applying(transform)
            let rightTransformed = rightFromCenter.applying(transform)
            return CGSize(width: max(abs(leftTransformed.x * 2.0), abs(rightTransformed.x * 2.0)),
                          height: max(abs(leftTransformed.y * 2.0), abs(rightTransformed.y * 2.0)))
        }()

        // Center the rotated image view in a canvas view large enough to contain it,
        // then snapshot the canvas.
        let canvasView = UIView(frame: CGRect(x: 0.0, y: 0.0, width: transformedSize.width, height: transformedSize.height))
        canvasView.backgroundColor = backgroundColor
        canvasView.addSubview(imageView)
        imageView.center = CGPoint(x: transformedSize.width * 0.5, y: transformedSize.height * 0.5)
        imageView.transform = transform
        return viewSnapshot(view: canvasView)
    }

    private static func viewSnapshot(view: UIView?) -> UIImage? {
        guard let view = view else {
            return nil
        }
        UIGraphicsBeginImageContext(view.bounds.size)
        guard let context = UIGraphicsGetCurrentContext() else {
            return nil
        }
        view.layer.render(in: context)
        let image = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return image
    }
}
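For completeness, a call site might look like this ("photo" is just a placeholder asset name). The background color only matters for angles that are not multiples of 90 degrees, where the corners of the canvas are not covered by the rotated image:

// Hypothetical usage; "photo" is a placeholder asset name.
let original = UIImage(named: "photo")

// Arbitrary angle: the canvas corners show through, so pass a background color.
let tilted = ImageTools.rotatedImage(image: original, byDegrees: 30, backgroundColor: .white)

// Quarter turn: the rotated image covers the whole canvas, no background needed.
let upright = ImageTools.rotatedImage(image: original, byDegrees: 90)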
This procedure renders the UIView's layer into the context (layer.render(in:)), which I believe is currently the best approach. Under the hood Quartz Core is used, and the method is well optimized and performs very well. So there is no need to pull your hair out over manual rotations, translations, and scales, and there is no issue if the image already carries an orientation.
Upvotes: 1