We capture video on iOS using setPreferredVideoStabilizationMode:AVCaptureVideoStabilizationModeAuto, but the video still sometimes comes out blurry at the start and at the end (it is fine in the middle), which is very problematic because we grab the first frame as a still image (in order to enable video and photo capabilities without switching camera modes).
Placing the device flat on a desk removes all blurriness, so the whole video is sharp throughout. This suggests it has something to do with video stabilization, but is there another property to set?
Does locking the focus mode matter?
Any other troubleshooting tips?
Here is the video capture function from PBJVision, which we use:
// Begins recording video to a new, uniquely named .mp4 file.
// Validates the session/camera mode up front, then performs the actual
// setup asynchronously on the capture-video queue. All failure paths
// report through _failVideoCaptureWithErrorCode: and return early.
- (void)startVideoCapture
{
// Precondition: the session must be able to capture with the current
// output and the camera must be in video mode.
if (![self _canSessionCaptureWithOutput:_currentOutput] || _cameraMode != PBJCameraModeVideo) {
[self _failVideoCaptureWithErrorCode:PBJVisionErrorSessionFailed];
DLog(@"session is not setup properly for capture");
return;
}
DLog(@"starting video capture");
// All recording state is owned by the capture-video queue; mutate it there.
[self _enqueueBlockOnCaptureVideoQueue:^{
// Already recording (or paused mid-recording): nothing to do.
if (_flags.recording || _flags.paused)
return;
// Default output filename is a fresh UUID; the delegate may rename it.
NSString *guid = [[NSUUID new] UUIDString];
NSString *outputFile = [NSString stringWithFormat:@"video_%@.mp4", guid];
if ([_delegate respondsToSelector:@selector(vision:willStartVideoCaptureToFile:)]) {
outputFile = [_delegate vision:self willStartVideoCaptureToFile:outputFile];
// A nil filename from the delegate vetoes the capture.
if (!outputFile) {
[self _failVideoCaptureWithErrorCode:PBJVisionErrorBadOutputFile];
return;
}
}
// Write into the configured capture directory, falling back to tmp.
NSString *outputDirectory = (_captureDirectory == nil ? NSTemporaryDirectory() : _captureDirectory);
NSString *outputPath = [outputDirectory stringByAppendingPathComponent:outputFile];
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
// Remove any stale file at the target path; failing to remove it is fatal
// for this capture (the writer could not create the file anyway).
if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath]) {
NSError *error = nil;
if (![[NSFileManager defaultManager] removeItemAtPath:outputPath error:&error]) {
[self _failVideoCaptureWithErrorCode:PBJVisionErrorOutputFileExists];
DLog(@"could not setup an output file (file exists)");
return;
}
}
// Guard against an empty/garbage path (e.g. a delegate-supplied name
// that collapsed to nothing).
if (!outputPath || [outputPath length] == 0) {
[self _failVideoCaptureWithErrorCode:PBJVisionErrorBadOutputFile];
DLog(@"could not setup an output file");
return;
}
// Tear down any previous writer before creating a fresh one so its
// delegate callbacks cannot fire into us mid-swap.
if (_mediaWriter) {
_mediaWriter.delegate = nil;
_mediaWriter = nil;
}
_mediaWriter = [[PBJMediaWriter alloc] initWithOutputURL:outputURL];
_mediaWriter.delegate = self;
// Align the video connection's orientation with the current device/UI state.
AVCaptureConnection *videoConnection = [_captureOutputVideo connectionWithMediaType:AVMediaTypeVideo];
[self _setOrientationForConnection:videoConnection];
// Stamp the start time from the host clock; _timeOffset accumulates
// pause gaps and starts out invalid (no pause yet).
_startTimestamp = CMClockGetTime(CMClockGetHostTimeClock());
_timeOffset = kCMTimeInvalid;
// Reset recording state flags for the new capture.
_flags.recording = YES;
_flags.paused = NO;
_flags.interrupted = NO;
_flags.videoWritten = NO;
// Fresh thumbnail bookkeeping; optionally request a thumbnail of frame 0.
_captureThumbnailTimes = [NSMutableSet set];
_captureThumbnailFrames = [NSMutableSet set];
if (_flags.thumbnailEnabled && _flags.defaultVideoThumbnails) {
[self captureVideoThumbnailAtFrame:0];
}
// Notify the delegate on the main queue that capture has started.
[self _enqueueBlockOnMainQueue:^{
if ([_delegate respondsToSelector:@selector(visionDidStartVideoCapture:)])
[_delegate visionDidStartVideoCapture:self];
}];
}];
}
This code configures PBJVision and starts video capture:
/// Configures PBJVision for back-camera portrait video capture and kicks
/// off recording right away.
private func initPBJVision() {
    // Become the delegate first so no PBJVision callback is missed.
    pbj.delegate = self
    // Camera hardware selection and orientation.
    pbj.cameraDevice = .Back
    pbj.cameraOrientation = .Portrait
    // Video capture with the preset output format and one-shot auto focus;
    // thumbnail generation is not needed here.
    pbj.cameraMode = .Video
    pbj.outputFormat = .Preset
    pbj.focusMode = .AutoFocus
    pbj.thumbnailEnabled = false
    // Log status
    print("Configured PBJVision")
    // Begin recording immediately after configuration.
    pbj.startVideoCapture()
}
Once PBJ is ready with its preview, we make the camera focus on the midpoint of the screen.
// Called when PBJVision preview begins
func visionSessionDidStartPreview(vision: PBJVision) {
// Focus screen at midpoint
let focus_x = CGFloat(0.5)
let focus_y = CGFloat(0.5)
}