iOS Video Compression Swift iOS 8 corrupt video file

Posted 2019-01-08 07:20

Question:

I am trying to compress video taken with the user's camera via UIImagePickerController (not an existing video, but one recorded on the fly) before uploading it to my server. I want the upload to take as little time as possible, so a smaller file is ideal instead of the 30-45 MB produced by the newer, higher-quality cameras.

Here is the code that does the compression in Swift for iOS 8, and it compresses wonderfully: I go from 35 MB down to 2.1 MB easily.

   func convertVideo(inputUrl: NSURL, outputURL: NSURL) 
   {
    //setup video writer
    var videoAsset = AVURLAsset(URL: inputUrl, options: nil) as AVAsset

    var videoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack

    var videoSize = videoTrack.naturalSize

    var videoWriterCompressionSettings = Dictionary(dictionaryLiteral:(AVVideoAverageBitRateKey,NSNumber(integer:960000)))

    var videoWriterSettings = Dictionary(dictionaryLiteral:(AVVideoCodecKey,AVVideoCodecH264),
        (AVVideoCompressionPropertiesKey,videoWriterCompressionSettings),
        (AVVideoWidthKey,videoSize.width),
        (AVVideoHeightKey,videoSize.height))

    var videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoWriterSettings)

    videoWriterInput.expectsMediaDataInRealTime = true

    videoWriterInput.transform = videoTrack.preferredTransform


    var videoWriter = AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie, error: nil)

    videoWriter.addInput(videoWriterInput)

    var videoReaderSettings: [String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]

    var videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)

    var videoReader = AVAssetReader(asset: videoAsset, error: nil)

    videoReader.addOutput(videoReaderOutput)



    //setup audio writer
    var audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: nil)

    audioWriterInput.expectsMediaDataInRealTime = false

    videoWriter.addInput(audioWriterInput)


    //setup audio reader

    var audioTrack = videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as AVAssetTrack

    var audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil) as AVAssetReaderOutput

    var audioReader = AVAssetReader(asset: videoAsset, error: nil)


    audioReader.addOutput(audioReaderOutput)

    videoWriter.startWriting()


    //start writing from video reader
    videoReader.startReading()

    videoWriter.startSessionAtSourceTime(kCMTimeZero)

    //dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue", nil)

    var queue = dispatch_queue_create("processingQueue", nil)

    videoWriterInput.requestMediaDataWhenReadyOnQueue(queue, usingBlock: { () -> Void in
        println("Export starting")

        while videoWriterInput.readyForMoreMediaData
        {
            var sampleBuffer:CMSampleBufferRef!

            sampleBuffer = videoReaderOutput.copyNextSampleBuffer()

            if (videoReader.status == AVAssetReaderStatus.Reading && sampleBuffer != nil)
            {
                videoWriterInput.appendSampleBuffer(sampleBuffer)

            }

            else
            {
                videoWriterInput.markAsFinished()

                if videoReader.status == AVAssetReaderStatus.Completed
                {
                    if audioReader.status == AVAssetReaderStatus.Reading || audioReader.status == AVAssetReaderStatus.Completed
                    {

                    }
                    else {


                        audioReader.startReading()

                        videoWriter.startSessionAtSourceTime(kCMTimeZero)

                        var queue2 = dispatch_queue_create("processingQueue2", nil)


                        audioWriterInput.requestMediaDataWhenReadyOnQueue(queue2, usingBlock: { () -> Void in

                            while audioWriterInput.readyForMoreMediaData
                            {
                                var sampleBuffer:CMSampleBufferRef!

                                sampleBuffer = audioReaderOutput.copyNextSampleBuffer()

                                println(sampleBuffer == nil)

                                if (audioReader.status == AVAssetReaderStatus.Reading && sampleBuffer != nil)
                                {
                                    audioWriterInput.appendSampleBuffer(sampleBuffer)

                                }

                                else
                                {
                                    audioWriterInput.markAsFinished()

                                    if (audioReader.status == AVAssetReaderStatus.Completed)
                                    {

                                        videoWriter.finishWritingWithCompletionHandler({ () -> Void in

                                            println("Finished writing video asset.")

                                            self.videoUrl = outputURL

                                                var data = NSData(contentsOfURL: outputURL)!

                                                 println("Byte Size After Compression: \(data.length / 1048576) mb")

                                                println(videoAsset.playable)

                                                //Networking().uploadVideo(data, fileName: "Test2")

                                            self.dismissViewControllerAnimated(true, completion: nil)

                                        })
                                        break
                                    }
                                }
                            }
                        })
                        break
                    }
                }
            }// Second if

        }//first while

    })// first block
   // return
}

Here is the UIImagePickerController delegate code that calls the compression method:

func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject])
{
    // Extract the media type from selection

    let type = info[UIImagePickerControllerMediaType] as String

    if (type == kUTTypeMovie)
    {

        self.videoUrl = info[UIImagePickerControllerMediaURL] as? NSURL

        var uploadUrl = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingPathComponent("captured").stringByAppendingString(".mov"))

        var data = NSData(contentsOfURL: self.videoUrl!)!

        println("Size Before Compression: \(data.length / 1048576) mb")


        self.convertVideo(self.videoUrl!, outputURL: uploadUrl!)

        // Get the video from the info and set it appropriately.

        /*self.dismissViewControllerAnimated(true, completion: { () -> Void in


        //self.next.enabled = true

        })*/
    }
}

As I mentioned above, this works as far as file-size reduction goes, but when I get the file back (it is still of type .mov), QuickTime cannot play it. QuickTime does try to convert it initially but fails halfway through (1-2 seconds after opening the file). I've even tested the video file in AVPlayerViewController, but it doesn't give any info about the movie; it's just a play button with no loading and no length, just "--" where the time usually appears in the player. In other words, a corrupt file that won't play.
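For anyone reproducing this, a quick way to confirm that the exported file really is corrupt (rather than just slow to open) is to load it back as an AVURLAsset and inspect its duration and tracks. Below is a minimal diagnostic sketch in current Swift syntax; the helper name and the outputURL parameter are placeholders, not part of the original code.

import AVFoundation

// Diagnostic sketch: a corrupt container typically reports a zero/invalid
// duration and no tracks, while a healthy export reports both.
func inspectExportedFile(at outputURL: URL) {
    let asset = AVURLAsset(url: outputURL)
    asset.loadValuesAsynchronously(forKeys: ["duration", "tracks", "playable"]) {
        var error: NSError?
        guard asset.statusOfValue(forKey: "duration", error: &error) == .loaded else {
            print("Duration could not be loaded: \(String(describing: error))")
            return
        }
        print("Playable: \(asset.isPlayable)")
        print("Duration: \(asset.duration.seconds) s")
        print("Video tracks: \(asset.tracks(withMediaType: .video).count)")
        print("Audio tracks: \(asset.tracks(withMediaType: .audio).count)")
    }
}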

I'm sure it has something to do with the settings for writing the asset out, though whether the problem is in the video writing or the audio writing I'm not sure at all. It could even be the reading of the asset that is causing the corruption. I've tried changing the variables around and setting different keys for reading and writing, but I haven't found the right combination, and it's frustrating that I can compress the file but only get a corrupt result out of it. Any help would be appreciated. Please.

Answer 1:

This answer has been completely rewritten and annotated to support Swift 4.0. Keep in mind that changing the AVFileType and presetName values allows you to tweak the final output in terms of size and quality.

import AVFoundation

extension ViewController: AVCaptureFileOutputRecordingDelegate {
    // Delegate function has been updated
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        // This code just exists for getting the before size. You can remove it from production code
        do {
            let data = try Data(contentsOf: outputFileURL)
            print("File size before compression: \(Double(data.count / 1048576)) mb")
        } catch {
            print("Error: \(error)")
        }
        // This line creates a generic filename based on UUID, but you may want to use your own
        // The extension must match with the AVFileType enum
        let path = NSTemporaryDirectory() + UUID().uuidString + ".mov"
        let outputURL = URL(fileURLWithPath: path)
        let urlAsset = AVURLAsset(url: outputFileURL)
        // You can change the presetName value to obtain different results
        if let exportSession = AVAssetExportSession(asset: urlAsset,
                                                    presetName: AVAssetExportPresetMediumQuality) {
            exportSession.outputURL = outputURL
            // Changing the AVFileType enum gives you different options with
            // varying size and quality. Just ensure that the file extension
            // aligns with your choice
            exportSession.outputFileType = AVFileType.mov
            exportSession.exportAsynchronously {
                switch exportSession.status {
                case .unknown: break
                case .waiting: break
                case .exporting: break
                case .completed:
                    // This code only exists to provide the file size after compression. Should remove this from production code
                    do {
                        let data = try Data(contentsOf: outputURL)
                        print("File size after compression: \(Double(data.count / 1048576)) mb")
                    } catch {
                        print("Error: \(error)")
                    }
                case .failed: break
                case .cancelled: break
                }
            }
        }
    }
}
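As a side note on the presetName and AVFileType tweaking mentioned above, AVAssetExportSession can report which presets and container types are actually available for a given asset, which makes experimenting easier. A small sketch, assuming sourceURL points at the recorded movie (the helper name is a placeholder):

import AVFoundation

// Lists the export presets AVFoundation reports as compatible with this asset,
// and checks one specific preset/container combination up front.
func listExportOptions(for sourceURL: URL) {
    let asset = AVURLAsset(url: sourceURL)

    let presets = AVAssetExportSession.exportPresets(compatibleWith: asset)
    print("Compatible presets: \(presets)")

    AVAssetExportSession.determineCompatibility(ofExportPreset: AVAssetExportPresetMediumQuality,
                                                with: asset,
                                                outputFileType: .mov) { isCompatible in
        print("Medium quality -> .mov compatible: \(isCompatible)")
    }
}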

Below is the original answer as written for Swift 3.0:

extension ViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        guard let data = NSData(contentsOf: outputFileURL as URL) else {
            return
        }

        print("File size before compression: \(Double(data.length / 1048576)) mb")
        let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
        compressVideo(inputURL: outputFileURL as URL, outputURL: compressedURL) { (exportSession) in
            guard let session = exportSession else {
                return
            }

            switch session.status {
            case .unknown:
                break
            case .waiting:
                break
            case .exporting:
                break
            case .completed:
                guard let compressedData = NSData(contentsOf: compressedURL) else {
                    return
                }

                print("File size after compression: \(Double(compressedData.length / 1048576)) mb")
            case .failed:
                break
            case .cancelled:
                break
            }
        }
    }

    func compressVideo(inputURL: URL, outputURL: URL, handler:@escaping (_ exportSession: AVAssetExportSession?)-> Void) {
        let urlAsset = AVURLAsset(url: inputURL, options: nil)
        guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
            handler(nil)

            return
        }

        exportSession.outputURL = outputURL
        exportSession.outputFileType = AVFileTypeQuickTimeMovie
        exportSession.shouldOptimizeForNetworkUse = true
        exportSession.exportAsynchronously { () -> Void in
            handler(exportSession)
        }
    }
}


Answer 2:

Figured it out! There were two problems. The first was with the videoWriter.finishWritingWithCompletionHandler function call: when this completion block gets executed, it does NOT mean that the video writer has finished writing to the output URL. So I had to check that the status was Completed before I uploaded the actual video file. It's kind of a hack, but this is what I did:

videoWriter.finishWritingWithCompletionHandler({ () -> Void in

    while true
    {
        if videoWriter.status == .Completed
        {
            var data = NSData(contentsOfURL: outputURL)!

            println("Finished: Byte Size After Compression: \(data.length / 1048576) mb")

            Networking().uploadVideo(data, fileName: "Video")

            self.dismissViewControllerAnimated(true, completion: nil)
            break
        }
    }
})
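For what it's worth, spinning in a while true loop does work, but the writer also exposes its status and error directly, so the same check can be done once inside the completion handler. This is only a sketch in current Swift syntax, not the original code; videoWriter, outputURL, and the commented-out upload call are stand-ins for the objects in the snippet above.

import AVFoundation

// Sketch: inspect the writer's own status/error inside the completion handler
// instead of polling in a loop.
func finishCompression(videoWriter: AVAssetWriter, outputURL: URL) {
    videoWriter.finishWriting {
        switch videoWriter.status {
        case .completed:
            if let data = try? Data(contentsOf: outputURL) {
                print("Finished: byte size after compression: \(data.count / 1_048_576) MB")
                // Networking().uploadVideo(data, fileName: "Video")  // placeholder upload call
            }
        case .failed:
            print("Writer failed: \(String(describing: videoWriter.error))")
        default:
            print("Unexpected writer status: \(videoWriter.status.rawValue)")
        }
    }
}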

The second problem I was having was a Failed status, and that was because I kept writing to the same temp path, as shown in the code for the UIImagePickerController didFinishPickingMediaWithInfo method in my question. So I just used the current date as part of the file name so it would be unique:

var uploadUrl = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingPathComponent("\(NSDate())").stringByAppendingString(".mov"))
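A date string works, but NSDate's description contains spaces and colons, which make awkward file names; the Swift 3/4 code above uses a UUID for the same purpose. Below is a hedged sketch of that approach in current Swift, which also removes any leftover file at the path first (both AVAssetWriter and AVAssetExportSession fail if a file already exists at the output URL). The helper name is a placeholder.

import Foundation

// Builds a unique .mov URL in the temp directory and clears anything already
// sitting at that path; try? swallows the error thrown when nothing is there.
func makeUniqueOutputURL() -> URL {
    let url = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent(UUID().uuidString)
        .appendingPathExtension("mov")
    try? FileManager.default.removeItem(at: url)
    return url
}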

[EDIT]: BETTER SOLUTION

Ok, so after a lot of experimenting (and months later) I've found a much better and much simpler solution for getting a video down from 45 MB to 1.42 MB with pretty good quality.

Below is the function to call instead of the original convertVideo function. Note that I had to add my own completion handler parameter, which is called after the asynchronous export has finished; I just called it handler.

 func compressVideo(inputURL: NSURL, outputURL: NSURL, handler:(session: AVAssetExportSession)-> Void)
{
    var urlAsset = AVURLAsset(URL: inputURL, options: nil)

    var exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality)

    exportSession.outputURL = outputURL

    exportSession.outputFileType = AVFileTypeQuickTimeMovie

    exportSession.shouldOptimizeForNetworkUse = true

    exportSession.exportAsynchronouslyWithCompletionHandler { () -> Void in

        handler(session: exportSession)
    }

}

And here is the code in the imagePickerController didFinishPickingMediaWithInfo method:

self.compressVideo(inputURL!, outputURL: uploadUrl!, handler: { (handler) -> Void in

    if handler.status == AVAssetExportSessionStatus.Completed
    {
        var data = NSData(contentsOfURL: uploadUrl!)

        println("File size after compression: \(Double(data!.length / 1048576)) mb")

        self.picker.dismissViewControllerAnimated(true, completion: nil)
    }
    else if handler.status == AVAssetExportSessionStatus.Failed
    {
        let alert = UIAlertView(title: "Uh oh", message: "There was a problem compressing the video, maybe you can try again later. Error: \(handler.error.localizedDescription)", delegate: nil, cancelButtonTitle: "Okay")

        alert.show()
    }
})


Answer 3:

Your conversion method is asynchronous, yet doesn't have a completion block. So how can your code know when the file is ready? Maybe you're using the file before it has been completely written.
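In today's Swift, the shape being suggested here is a conversion function that reports back through a closure rather than leaving the caller to guess when the file is ready. The sketch below only shows the signature (the body is elided); Answer 2's compressVideo(inputURL:outputURL:handler:) above is a concrete version of the same idea.

import Foundation

// Signature sketch only: the caller learns about completion (or failure)
// through the closure instead of polling the file system.
func convertVideo(inputURL: URL, outputURL: URL,
                  completion: @escaping (URL?, Error?) -> Void) {
    // Kick off the asynchronous reader/writer or export session here, then call
    // completion(outputURL, nil) or completion(nil, error) when it finishes.
}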

The conversion itself also looks strange - audio and video are usually written in parallel, not in series.

Your miraculous compression ratio might indicate that you've written out fewer frames than you actually think.



Answer 4:

Here is code compatible with Swift 4.0; see "Compress video size before attach to an email in swift". It also lets you track the progress of the video compression.
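The progress-tracking claim refers to AVAssetExportSession's progress property, a Float that goes from 0.0 to 1.0 while the export runs, which you can poll on a timer. A minimal sketch, assuming an already configured exportSession and a caller on the main thread (the wrapper function is a placeholder, not from the linked answer):

import AVFoundation

// Polls exportSession.progress on a timer while the asynchronous export runs,
// then stops the timer on the main queue once the export completes.
// Assumes this is called on the main thread so the timer has a run loop.
func exportWithProgress(_ exportSession: AVAssetExportSession,
                        completion: @escaping () -> Void) {
    let timer = Timer.scheduledTimer(withTimeInterval: 0.25, repeats: true) { _ in
        print("Compression progress: \(Int(exportSession.progress * 100))%")
    }
    exportSession.exportAsynchronously {
        DispatchQueue.main.async {
            timer.invalidate()
            print("Export finished with status: \(exportSession.status.rawValue)")
            completion()
        }
    }
}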