This is the code I use to convert a single image into a video. It works perfectly on an iPhone 4S running iOS 8.4, but when I deploy the app on an iPhone 5s running iOS 9.2.1, the export fails. Printing the error with

    print(exporter.error)

shows:

    Error Domain=AVFoundationErrorDomain Code=-11820 "Cannot Complete Export" UserInfo={NSLocalizedDescription=Cannot Complete Export, NSLocalizedRecoverySuggestion=Try exporting again.}

Here is the full code:

    var i:NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("temp", ofType: "mp4")!)

    //        firrstassest = AVURLAsset.assetWithURL(i) as AVURLAsset
    //        firrstassest = AVAsset.assetWithURL(i) as AVAsset
    firrstassest = AVAsset(URL: i)


    if firrstassest==nil{
        let alert = UIAlertView(title: "no asset loaded ", message: "please load video", delegate: nil, cancelButtonTitle: "ok")
        alert.show()
    }else{

        var mixComposition:AVMutableComposition = AVMutableComposition()

        var videTrack:AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32 (kCMPersistentTrackID_Invalid))
        var AudioTrack:AVMutableCompositionTrack=mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

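        // fixed 22-second range for now; the commented-out line below computes the duration from the audio and the image count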
        var cm = CMTimeMakeWithSeconds(22, 1)
//            var cm = CMTimeMakeWithSeconds(Float64(calculatingVideoDuration(imgArray.count, durationOfAudio: (VisualizationAPIImg.getDuratonInSec(audioUrl)))), 1)

        var b = firrstassest.tracksWithMediaType(AVMediaTypeVideo)
        print(cm)
        print(b[0])
        do{
            try videTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, cm), ofTrack: b[0] as AVAssetTrack, atTime: kCMTimeZero)


        }catch {
            print("Error1 videTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, cm), ofTrack: b[0] as AVAssetTrack, atTime: kCMTimeZero)")

        }

        var mainInstruction:AVMutableVideoCompositionInstruction=AVMutableVideoCompositionInstruction()

        mainInstruction.timeRange=CMTimeRangeMake(kCMTimeZero, cm)

        var videoLayerinstruction:AVMutableVideoCompositionLayerInstruction=AVMutableVideoCompositionLayerInstruction(assetTrack: videTrack)


        var videAssetTrack:AVAssetTrack = firrstassest.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack

        var videoAssetOrientation:UIImageOrientation = UIImageOrientation.Up

        var isVideoAssetPotriat=false

        var videotransform:CGAffineTransform=videAssetTrack.preferredTransform

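        // detect the source video orientation from its preferred transform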
        if videotransform.a == 0 && videotransform.b == 1.0 && videotransform.c == -1.0 && videotransform.d == 0 {
            videoAssetOrientation=UIImageOrientation.Right
            isVideoAssetPotriat=true
        }
        if videotransform.a == 0 && videotransform.b == -1.0 && videotransform.c == 1.0 && videotransform.d == 0 {
            videoAssetOrientation=UIImageOrientation.Left
            isVideoAssetPotriat=true
        }

        if videotransform.a == 1.0 && videotransform.b == 0 && videotransform.c == 0 && videotransform.d == 1.0 {
            videoAssetOrientation=UIImageOrientation.Up
            isVideoAssetPotriat=false
        }

        if videotransform.a == -1.0 && videotransform.b == 0 && videotransform.c == 0 && videotransform.d == -1.0 {
            videoAssetOrientation=UIImageOrientation.Down
            isVideoAssetPotriat=false
        }

        videoLayerinstruction.setTransform(videAssetTrack.preferredTransform, atTime: kCMTimeZero)

        mainInstruction.layerInstructions = [videoLayerinstruction]

        var maincompositionInst:AVMutableVideoComposition = AVMutableVideoComposition()
        var naturalsize:CGSize

        naturalsize=CGSizeMake(image.size.width, image.size.height)

        var renderWidth:CGFloat=naturalsize.width
        var renderHeight:CGFloat=naturalsize.height
        maincompositionInst.renderSize=CGSizeMake(renderWidth, renderHeight)
        maincompositionInst.instructions = [mainInstruction]
        maincompositionInst.frameDuration=CMTimeMake(1, 30)

        applyVideoEffectsToComposition(maincompositionInst, size: naturalsize,image: image,imgArray:imgArray,text:text)

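        // build the output path FinalVideo-<index>.mov in Documents and remove any existing file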
        var paths=NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
        var documentDirectory:NSString=paths[0] as! NSString
        var myPathDocs = documentDirectory.stringByAppendingPathComponent(NSString(format: "FinalVideo-%d.mov", imgArray.indexOf(image)!) as String)

        do {
            let fileManager = NSFileManager()
            try fileManager.removeItemAtPath(myPathDocs as String)
        }catch {}

        var url = NSURL.fileURLWithPath(myPathDocs)

        var exporter:AVAssetExportSession!

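        // choose the export preset based on the iOS version (currently both branches use AVAssetExportPreset640x480)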
        switch UIDevice.currentDevice().systemVersion.compare("9.1.0", options: NSStringCompareOptions.NumericSearch) {
        case .OrderedSame, .OrderedDescending:
            exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset640x480)
            print(UIDevice.currentDevice().systemVersion)

        case .OrderedAscending:
            exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset640x480)
        }


        exporter.outputURL=url
        exporter.outputFileType=AVFileTypeQuickTimeMovie
        exporter.shouldOptimizeForNetworkUse=true
        print(exporter.supportedFileTypes)
        exporter.videoComposition=maincompositionInst
        exporter.exportAsynchronouslyWithCompletionHandler({})
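
The completion handler is shown empty above; the print(exporter.error) quoted at the top is called inside that handler once the export finishes, roughly like this (a trimmed-down sketch):

    exporter.exportAsynchronouslyWithCompletionHandler({
        dispatch_async(dispatch_get_main_queue(), {
            switch exporter.status {
            case .Completed:
                print("exported to \(url)")
            case .Failed, .Cancelled:
                // this is where the -11820 error above gets printed
                print(exporter.error)
            default:
                break
            }
        })
    })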

Thanks in advance.