In an app I'm developing, the user is required to take a "selfie" (yes, I know, but the app is for private use only).
I've got everything working with the camera showing in the circular UIView region; however, I cannot get it to scale and fill the circle properly. Here's what it's doing now:
And here's what I want it to be doing:
Here's the code for my UIView:
var cameraView = UIView()
cameraView.frame = CGRectMake(100, self.view.center.y - 260, 568, 568)
cameraView.backgroundColor = UIColor(red: 26/255, green: 188/255, blue: 156/255, alpha: 1)
// A corner radius of half the width/height turns the square view into a circle
cameraView.layer.cornerRadius = 284
cameraView.layer.borderColor = UIColor.whiteColor().CGColor
cameraView.layer.borderWidth = 15
cameraView.contentMode = UIViewContentMode.ScaleToFill
// Clip sublayers (including the camera preview layer) to the rounded bounds
cameraView.layer.masksToBounds = true
I have tried a few different contentMode options, including ScaleToFill, ScaleAspectFill, and ScaleAspectFit. They all generate the exact same result.
As it turns out, the contentMode options were never going to work here: contentMode only affects a view's own content, not sublayers such as the camera preview. The preview layer itself ("self.previewLayer", an AVCaptureVideoPreviewLayer) has its own property, videoGravity, that determines how the camera's content fills the layer.
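For reference, these are the three videoGravity values from this era of AVFoundation (the string constants have since been replaced by the AVLayerVideoGravity type); aspect-fill is the one that produces the cropped, circle-filling behavior:

// AVLayerVideoGravityResizeAspect     - fit inside the bounds, letterboxing if needed
// AVLayerVideoGravityResizeAspectFill - fill the bounds, cropping the overflow
// AVLayerVideoGravityResize           - stretch to fill, distorting the aspect ratio
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill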
In the following code I changed "self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect" to "self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill", which scales the video to fill the circular view and crops whatever overflows it:
extension SelfieViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

    func setupAVCapture() {
        session.sessionPreset = AVCaptureSessionPreset640x480

        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the front camera
                if (device.position == AVCaptureDevicePosition.Front) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                        break
                    }
                }
            }
        }
    }

    func beginSession() {
        var err: NSError? = nil
        var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &err)
        if err != nil {
            println("error: \(err!.localizedDescription)")
        }
        if self.session.canAddInput(deviceInput) {
            self.session.addInput(deviceInput)
        }

        self.videoDataOutput = AVCaptureVideoDataOutput()
        // Request BGRA frames: the pixel-format key maps to the desired format value
        var rgbOutputSettings = [kCVPixelBufferPixelFormatTypeKey as NSString: NSNumber(integer: kCMPixelFormat_32BGRA)]
        self.videoDataOutput.videoSettings = rgbOutputSettings
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
        self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput) {
            session.addOutput(self.videoDataOutput)
        }
        self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        // The fix: aspect-fill scales the video to fill the layer's bounds,
        // cropping whatever falls outside the circle
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

        var rootLayer: CALayer = self.cameraView.layer
        rootLayer.masksToBounds = true
        self.previewLayer.frame = rootLayer.bounds
        rootLayer.addSublayer(self.previewLayer)
        session.startRunning()
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // do stuff here
    }

    // clean up AVCapture
    func stopCamera() {
        session.stopRunning()
    }
}
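One caveat: beginSession() sets the preview layer's frame only once, so if cameraView is resized afterwards (rotation, an Auto Layout pass), the preview won't follow it. A minimal sketch of keeping the two in sync, assuming previewLayer and cameraView are properties of SelfieViewController as above:

// In SelfieViewController itself (not the extension)
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // Keep the preview layer matched to the circular view's current bounds
    self.previewLayer.frame = self.cameraView.bounds
}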