I have implemented a custom view that adds several CALayers as sublayers of a UIView. When I animate the view with the following:

UIView.animateWithDuration(2.0) { self.slider.bounds.size *= 2.0 }

the scaling animation goes wrong: the sublayers jump to the wrong position, already at the scaled size, and then slide to their final position instead of scaling together with the view.
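For context, this is roughly how the view is created and animated (a minimal sketch; slider, its frame, and the surrounding view controller are placeholders, and *= on CGSize is a custom operator, sketched after the class below):

// slider is a stored property on my view controller.
slider = GridMaskView(frame: CGRect(x: 50, y: 100, width: 200, height: 200))
view.addSubview(slider)

UIView.animateWithDuration(2.0) {
    // Double the view's bounds; the sublayers jump instead of scaling smoothly.
    self.slider.bounds.size *= 2.0
}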
The custom view code:
import UIKit

class GridMaskView: UIView {
    private let cornerLayer: CAShapeLayer
    private let borderLayer: CAShapeLayer
    private let gridLayer: CAShapeLayer

    // Drawing constants.
    private let gridSize: (horizontal: UInt, vertical: UInt) = (3, 3)
    private let cornerThickness: CGFloat = 3.0
    private let cornerLength: CGFloat = 20.0
    private let borderThickness: CGFloat = 2.0
    private let gridThickness: CGFloat = 1.0
    private let lineColor: UIColor = UIColor(r: 120, g: 179, b: 193, a: 1)

    var showGridLines: Bool = true {
        didSet {
            gridLayer.hidden = !showGridLines
        }
    }
    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override init(frame: CGRect) {
        // The corner handles are filled shapes; the border and grid are stroked paths.
        cornerLayer = CAShapeLayer()
        cornerLayer.fillColor = lineColor.CGColor

        borderLayer = CAShapeLayer()
        borderLayer.fillColor = UIColor.clearColor().CGColor
        borderLayer.strokeColor = lineColor.CGColor
        borderLayer.lineWidth = borderThickness

        gridLayer = CAShapeLayer()
        gridLayer.strokeColor = lineColor.CGColor
        gridLayer.lineWidth = gridThickness

        super.init(frame: frame)

        layer.addSublayer(cornerLayer)
        layer.addSublayer(borderLayer)
        layer.addSublayer(gridLayer)
    }
    override func layoutSubviews() {
        super.layoutSubviews()
        layoutLayers()
    }

    private func layoutLayers() {
        drawCorner()
        drawBorder()
        drawGrid()
    }

    private func drawCorner() {
        // Grow the layer outward so the corner handles overlap the border.
        cornerLayer.frame = bounds.insetBy(dx: -cornerThickness, dy: -cornerThickness)
        cornerLayer.path = cornerPath(forBounds: cornerLayer.bounds)
    }
    private func cornerPath(forBounds bounds: CGRect) -> CGPathRef {
        let horizontalSize = CGSize(width: cornerLength, height: cornerThickness)
        let verticalSize = CGSize(width: cornerThickness, height: cornerLength)
        let corners: [(CGRectEdge, CGRectEdge)] = [(.MinXEdge, .MinYEdge), (.MinXEdge, .MaxYEdge), (.MaxXEdge, .MinYEdge), (.MaxXEdge, .MaxYEdge)]
        var cornerRects = [CGRect]()
        for corner in corners {
            // One horizontal and one vertical bar per corner, forming an L shape.
            cornerRects.append(bounds.align(horizontalSize, corner: corner.0, corner.1))
            cornerRects.append(bounds.align(verticalSize, corner: corner.0, corner.1))
        }
        let cornerPath = CGPathCreateMutable()
        CGPathAddRects(cornerPath, nil, cornerRects, cornerRects.count)
        return cornerPath
    }
    private func drawBorder() {
        borderLayer.frame = bounds
        borderLayer.path = borderPath(forBounds: borderLayer.bounds)
    }

    private func borderPath(forBounds bounds: CGRect) -> CGPathRef {
        let borderPath = CGPathCreateMutable()
        let borderCornerPoints = [bounds.topLeft, bounds.topRight, bounds.bottomRight, bounds.bottomLeft, bounds.topLeft]
        CGPathAddLines(borderPath, nil, borderCornerPoints, borderCornerPoints.count)
        return borderPath
    }

    private func drawGrid() {
        gridLayer.frame = bounds
        gridLayer.path = gridPath(forBounds: gridLayer.bounds)
    }
    private func gridPath(forBounds bounds: CGRect) -> CGPathRef {
        let stepSize = bounds.size / (CGFloat(gridSize.horizontal), CGFloat(gridSize.vertical))
        let gridPath = CGPathCreateMutable()
        // Vertical lines between columns (1..< skips the far edge, which the border already draws).
        for i in 1..<gridSize.horizontal {
            let x = CGFloat(i) * stepSize.width
            CGPathMoveToPoint(gridPath, nil, x, 0)
            CGPathAddLineToPoint(gridPath, nil, x, bounds.size.height)
        }
        // Horizontal lines between rows.
        for i in 1..<gridSize.vertical {
            let y = CGFloat(i) * stepSize.height
            CGPathMoveToPoint(gridPath, nil, 0, y)
            CGPathAddLineToPoint(gridPath, nil, bounds.size.width, y)
        }
        return gridPath
    }
    override func intrinsicContentSize() -> CGSize {
        return CGSize(width: cornerLength * 2, height: cornerLength * 2)
    }
}
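For completeness, GridMaskView relies on a few small helper extensions defined elsewhere in the project. These are minimal sketches (assumed reconstructions, not the exact originals) that make the snippet above compile:

extension UIColor {
    // Convenience initializer taking 0-255 channel values.
    convenience init(r: CGFloat, g: CGFloat, b: CGFloat, a: CGFloat) {
        self.init(red: r / 255.0, green: g / 255.0, blue: b / 255.0, alpha: a)
    }
}

extension CGRect {
    var topLeft: CGPoint { return CGPoint(x: minX, y: minY) }
    var topRight: CGPoint { return CGPoint(x: maxX, y: minY) }
    var bottomLeft: CGPoint { return CGPoint(x: minX, y: maxY) }
    var bottomRight: CGPoint { return CGPoint(x: maxX, y: maxY) }

    // Returns a rect of the given size pinned to the two given edges of self.
    func align(size: CGSize, corner xEdge: CGRectEdge, _ yEdge: CGRectEdge) -> CGRect {
        let x = (xEdge == .MinXEdge) ? minX : maxX - size.width
        let y = (yEdge == .MinYEdge) ? minY : maxY - size.height
        return CGRect(x: x, y: y, width: size.width, height: size.height)
    }
}

// Component-wise division of a size by a (columns, rows) tuple.
func / (size: CGSize, divisor: (CGFloat, CGFloat)) -> CGSize {
    return CGSize(width: size.width / divisor.0, height: size.height / divisor.1)
}

// In-place scaling of a size, as used in the animation block.
func *= (inout size: CGSize, scale: CGFloat) {
    size = CGSize(width: size.width * scale, height: size.height * scale)
}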
Does anyone know how to fix this?