2011-01-19 38 views
14

Sự cố khi lưu video vào Thư viện iPhone. tôi có một loạt các UIImages, và hai nút, "convertToVideo" & "saveToiPhoneLib"tạo video từ mảng UIImages và lưu video vào thư viện iPhone. AVAssetLibrary + AVFoundation

 

// Builds the movie output path inside the caches directory and kicks off
// the image-array -> movie conversion using the view's size and a fixed
// 3-second duration.
- (IBAction)convertToVideo
{
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *cachesDirectory = [searchPaths count] > 0 ? [searchPaths objectAtIndex:0] : nil;
    NSString *outputPath = [cachesDirectory stringByAppendingPathComponent:@"videoOutput"];

    // Diagnostic dump of the chosen path (same output as before).
    printf(" \n\n\n-Video file == %s--\n\n\n", [outputPath UTF8String]);

    [self writeImageAsMovie:imageArray toPath:outputPath size:self.view.frame.size duration:3];
}


here i'm passing the imageArray and savedVideoPath to the function below 


// Writes the UIImages in `array` to a QuickTime movie at `path` using
// AVAssetWriter + a pixel-buffer adaptor.
//
// NOTE(review): `duration` is accepted but never used anywhere below.
// NOTE(review): only the FIRST image is appended (at time zero); the
// "Write samples" section is elided ("......") in this snippet, so the
// resulting file contains at most one frame — a likely cause of the
// "invalid data" error when saving to the photo album.
// NOTE(review): the return values of startWriting / appendPixelBuffer /
// finishWriting are never checked, and `videoWriter.error` is never read.
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
{ 

NSError *error = nil; 

// Writer targets a QuickTime (.mov) container at `path`; `error` is only
// populated on failure and is never inspected here.
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL: 
      [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie 
       error:&error]; 


NSParameterAssert(videoWriter); 

// H.264 output sized to the caller-supplied CGSize (truncated to int).
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
      AVVideoCodecH264, AVVideoCodecKey, 
      [NSNumber numberWithInt:size.width], AVVideoWidthKey, 
      [NSNumber numberWithInt:size.height], AVVideoHeightKey, 
      nil]; 
// The extra `retain` implies manual reference counting; confirm this file
// is compiled without ARC (the matching release is not visible here).
AVAssetWriterInput* writerInput = [[AVAssetWriterInput 
      assetWriterInputWithMediaType:AVMediaTypeVideo 
      outputSettings:videoSettings] retain]; 


// NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil]; 

// Adaptor converts CVPixelBuffers into video samples for `writerInput`.
// Passing nil attributes means no adaptor-managed pixel buffer pool config.
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor 
       assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput 
       sourcePixelBufferAttributes:nil]; 


NSParameterAssert(writerInput); 
NSParameterAssert([videoWriter canAddInput:writerInput]); 
[videoWriter addInput:writerInput]; 


//Start a session: 
[videoWriter startWriting]; 
[videoWriter startSessionAtSourceTime:kCMTimeZero]; 

CVPixelBufferRef buffer = NULL; 

//convert uiimage to CGImage. 

// NOTE(review): pixelBufferFromCGImage: returns a +1 ("Create") reference;
// `buffer` is never released here, so each call leaks one pixel buffer.
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]]; 
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero]; 

//Write samples: 
...... 


//Finish the session: 
[writerInput markAsFinished]; 
[videoWriter finishWriting]; 
} 


generate a CVPixelBufferRef here 


// Renders `image` into a newly created 32ARGB CVPixelBuffer and returns it.
// Ownership: this is a "Create"-rule +1 reference — the caller must release
// it with CVPixelBufferRelease (the caller in writeImageAsMovie: does not,
// which leaks one buffer per frame).
//
// NOTE(review): the buffer is sized to self.view.frame.size, but the image
// is drawn at its own CGImageGetWidth/Height dimensions.  If the UIImage
// size differs from the view size, the frame content will not fill the
// buffer (or will be clipped) — a plausible cause of the "invalid data"
// error described above; confirm both sizes match.
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image 
{ 
    // Compatibility flags so the buffer can back a CGBitmapContext and be
    // converted back to a CGImage.
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: 
     [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
     [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, 
     nil]; 
    CVPixelBufferRef pxbuffer = NULL; 

    // Plain (CFDictionaryRef) cast implies MRC; under ARC this would need
    // __bridge — confirm the file's memory model.
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, self.view.frame.size.width, 
      self.view.frame.size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, 
      &pxbuffer); 
    // NSParameterAssert compiles out in release builds, so a failed create
    // would fall through to a NULL dereference below in production.
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

    CVPixelBufferLockBaseAddress(pxbuffer, 0); 
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 
    NSParameterAssert(pxdata != NULL); 

    // Draw the CGImage directly into the pixel buffer's backing memory.
    // Row stride assumed to be exactly 4 * width; the real buffer stride
    // (CVPixelBufferGetBytesPerRow) may be larger due to alignment — TODO
    // confirm, a mismatch would skew every row.
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef context = CGBitmapContextCreate(pxdata, self.view.frame.size.width, 
      self.view.frame.size.height, 8, 4*self.view.frame.size.width, rgbColorSpace, 
      kCGImageAlphaNoneSkipFirst); 
    NSParameterAssert(context); 
    // Rotation by 0 radians is an identity transform (no-op).
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0)); 
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
      CGImageGetHeight(image)), image); 
    CGColorSpaceRelease(rgbColorSpace); 
    CGContextRelease(context); 

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 

    return pxbuffer; 
} 

Lưu vào thư viện iPhone

 

// Locates the movie previously written to the caches directory and asks
// UIKit to copy it into the device's Saved Photos album.  The completion
// selector video:didFinishSavingWithError:contextInfo: receives the result.
- (IBAction)saveToiPhoneLib
{
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *cachesDirectory = [searchPaths count] > 0 ? [searchPaths objectAtIndex:0] : nil;
    NSString *moviePath = [cachesDirectory stringByAppendingPathComponent:@"videoOutput"];

    // Diagnostic dump of the path being handed to the photo album.
    printf(" \n\n\n-Video file == %s--\n\n\n", [moviePath UTF8String]);

    UISaveVideoAtPathToSavedPhotosAlbum(moviePath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}


// Completion callback for UISaveVideoAtPathToSavedPhotosAlbum.
// `error` is nil on success; the message below prints "(null)" in that case.
- (void) video: (NSString *) videoPath didFinishSavingWithError: (NSError *) error contextInfo: (void *) contextInfo { 
NSLog(@"Finished saving video with error: %@", error); 
} 

nhưng trong khi lưu, tôi nhận được thông báo lỗi: -

Hoàn thành lưu video với lỗi: Error Domain = ALAssetsLibraryErrorDomain Code = -3302 "Dữ liệu không hợp lệ" UserInfo = 0x1d59f0 {NSLocalizedFailureReason = Đã xảy ra sự cố khi ghi nội dung này vì dữ liệu không hợp lệ và không thể xem hoặc phát được., NSLocalizedRecoverySuggestion = Hãy thử với dữ liệu khác, NSLocalizedDescription = Dữ liệu không hợp lệ}

hãy cho tôi biết lỗi của tôi. cảm ơn trước

+0

một ví dụ mã cho tiền thưởng sẽ được tốt đẹp. – DasBoot

Trả lời

0

Đó chỉ đơn giản là quá nhiều mã để kiểm tra lỗi. Đảm bảo bạn có thể bắt đầu phiên xuất, bạn thực sự nhận được bộ đệm pixel cho hình ảnh của mình, rằng người viết sẵn sàng nhận thêm dữ liệu, bộ đệm được thêm vào mà không có lỗi, phiên xuất kết thúc thành công và tệp phim đầu ra tồn tại và thực sự chứa một số dữ liệu. Chỉ sau đó bạn có thể thử và lưu nó vào album ảnh hệ thống. Kiểm tra tất cả các thông tin lỗi có sẵn trên đường đi để bạn có thể biết điều gì xảy ra lần đầu tiên. (. Một điều nữa là bạn chỉ đơn giản là lấy mã từ web và dán nó lại với nhau, mà chỉ đơn giản là không đi làm việc cho chương trình AV)

0

Vâng, tôi đã có lỗi tương tự:

Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo=0x193ce0 {NSLocalizedRecoverySuggestion=Try saving again., NSUnderlyingError=0x179e40 "The operation couldn’t be completed. (OSStatus error -12412.)", NSLocalizedDescription=Cannot Save} 

Nhưng chỉ trên giả lập, khi tôi chạy trên một thiết bị, lưu vào thư viện ảnh làm việc tốt.

+8

11823 lỗi xảy ra khi tệp đã tồn tại trên đường dẫn nơi bạn đang cố gắng lưu tệp, do đó, hãy làm một việc trước khi lưu vào thư mục tài liệu sẽ xóa tệp trên đường dẫn đó. – aToz

5
// Builds a QuickTime movie (test_output.mp4 in Documents) out of every PNG
// bundled with the app, at a fixed 400x200 frame size, holding each image
// on screen for `numberOfSecondsPerFrame` seconds.
//
// Fixes vs. the original:
//  * each CVPixelBuffer returned by pixelBufferFromCGImage: is now released
//    after being appended (it is a +1 "Create" reference; the original
//    leaked one buffer per frame),
//  * the image-count log now runs AFTER the array is populated (it always
//    printed 0 before), with a %lu format matching NSUInteger,
//  * the double frame duration is cast explicitly when building the CMTime.
-(void)convertimagetoVideo 
{ 
    ///////////// setup OR function def if we move this to a separate function //////////// 
    // this should be moved to its own function, that can take an imageArray, videoOutputPath, etc... 

    NSError *error = nil; 

    // Output file lives in Documents; delete any stale copy first, because
    // AVAssetWriter refuses to overwrite an existing file (error -11823).
    NSFileManager *fileMgr = [NSFileManager defaultManager]; 
    NSString *documentsDirectory = [NSHomeDirectory() 
               stringByAppendingPathComponent:@"Documents"]; 
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"]; 
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES) 
        NSLog(@"Unable to delete file: %@", [error localizedDescription]); 

    // Fixed frame geometry and timescale for the whole movie.
    CGSize imageSize = CGSizeMake(400, 200); 
    NSUInteger fps = 30; 

    // Load every bundled PNG as a UIImage frame.
    NSArray* imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil]; 
    NSMutableArray *imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count]; 
    for (NSString* path in imagePaths) 
    { 
        [imageArray addObject:[UIImage imageWithContentsOfFile:path]]; 
    } 
    // Log after population so the count is meaningful (was logged before,
    // always printing 0).
    NSLog(@"-->imageArray.count= %lu", (unsigned long)imageArray.count); 

    //////////////  end setup /////////////////////////////////// 

    NSLog(@"Start building video from defined frames."); 

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL: 
               [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie 
                      error:&error]; 
    NSParameterAssert(videoWriter); 

    // H.264 at the fixed frame size declared above.
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
               AVVideoCodecH264, AVVideoCodecKey, 
               [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey, 
               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey, 
               nil]; 

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput 
                 assetWriterInputWithMediaType:AVMediaTypeVideo 
                 outputSettings:videoSettings]; 

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor 
                   assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput 
                   sourcePixelBufferAttributes:nil]; 

    NSParameterAssert(videoWriterInput); 
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]); 
    videoWriterInput.expectsMediaDataInRealTime = YES; 
    [videoWriter addInput:videoWriterInput]; 

    //Start a session: 
    [videoWriter startWriting]; 
    [videoWriter startSessionAtSourceTime:kCMTimeZero]; 

    CVPixelBufferRef buffer = NULL; 

    int frameCount = 0; 
    double numberOfSecondsPerFrame = 6; 
    // With a timescale of `fps`, each frame's duration in timescale units.
    double frameDuration = fps * numberOfSecondsPerFrame; 

    NSLog(@"**************************************************"); 
    for(UIImage * img in imageArray) 
    { 
        buffer = [self pixelBufferFromCGImage:[img CGImage]]; 

        // Retry for up to ~3s while the writer input catches up.
        BOOL append_ok = NO; 
        int j = 0; 
        while (!append_ok && j < 30) { 
            if (adaptor.assetWriterInput.readyForMoreMediaData) { 
                NSLog(@"Processing video frame (%d,%d)",frameCount,[imageArray count]); 

                // Presentation time of this frame, in units of 1/fps seconds.
                CMTime frameTime = CMTimeMake((int64_t)(frameCount*frameDuration),(int32_t) fps); 
                NSLog(@"seconds = %f, %u, %d", CMTimeGetSeconds(frameTime),fps,j); 
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime]; 
                if(!append_ok){ 
                    NSError *error = videoWriter.error; 
                    if(error!=nil) { 
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]); 
                    } 
                } 
            } 
            else { 
                printf("adaptor not ready %d, %d\n", frameCount, j); 
                [NSThread sleepForTimeInterval:0.1]; 
            } 
            j++; 
        } 
        if (!append_ok) { 
            printf("error appending image %d times %d\n, with error.", frameCount, j); 
        } 
        // pixelBufferFromCGImage: hands back a +1 reference; release it now
        // that the adaptor has (or has failed to) consume it.
        if (buffer != NULL) { 
            CVPixelBufferRelease(buffer); 
            buffer = NULL; 
        } 
        frameCount++; 
    } 
    NSLog(@"**************************************************"); 

    //Finish the session: 
    [videoWriterInput markAsFinished]; 
    [videoWriter finishWriting]; 
    NSLog(@"Write Ended"); 
} 


// Muxes the silent movie written by convertimagetoVideo (test_output.mp4)
// with a bundled MP3 (30secs.mp3) into final_video.mp4 in Documents, then
// saves the result to the photo album from the export completion handler.
//
// NOTE(review): exportAsynchronouslyWithCompletionHandler: returns
// immediately, so the "DONE" NSLog below fires BEFORE the export has
// actually finished; the real completion point is the handler block.
// NOTE(review): both insertTimeRange:... calls pass error:nil, so a failed
// track insertion is silently ignored.
-(void)CompileFilestomakeVideo 
{ 

    // set up file manager, and file videoOutputPath, remove "test_output.mp4" if it exists... 
    //NSString *videoOutputPath = @"/Users/someuser/Desktop/test_output.mp4"; 
    NSString *documentsDirectory = [NSHomeDirectory() 
            stringByAppendingPathComponent:@"Documents"]; 
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"]; 
    //NSLog(@"-->videoOutputPath= %@", videoOutputPath); 
    // get rid of existing mp4 if exists... 

    // Composition that will hold one video track + one audio track.
    AVMutableComposition* mixComposition = [AVMutableComposition composition]; 

    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath]; 
    // audio input file... 
    NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"]; 
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath]; 

    // this is the video file that was just written above, full path to file is in --> videoOutputPath 
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath]; 

    // create the final video output file as MOV file - may need to be MP4, but this works so far... 
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"]; 
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath]; 

    // The export session cannot overwrite an existing file; remove it first.
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) 
     [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil]; 

    CMTime nextClipStartTime = kCMTimeZero; 

    // Copy the full video track of the silent movie into the composition.
    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil]; 
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration); 
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil]; 

    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration); 

    // Copy the full audio track of the MP3 alongside it, starting at 0.
    // NOTE(review): [... objectAtIndex:0] throws if the asset has no audio
    // track (e.g. missing 30secs.mp3) — confirm the resource is bundled.
    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil]; 
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration); 
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil]; 



    // Raw UTI string "com.apple.quicktime-movie" == AVFileTypeQuickTimeMovie.
    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 
    _assetExport.outputFileType = @"com.apple.quicktime-movie"; 
    //_assetExport.outputFileType = @"public.mpeg-4"; 
    //NSLog(@"support file types= %@", [_assetExport supportedFileTypes]); 
    _assetExport.outputURL = outputFileUrl; 

    // Runs on a background queue when the export finishes; only then is the
    // output file complete and safe to save to the photo album.
    [_assetExport exportAsynchronouslyWithCompletionHandler: 
    ^(void) { 
     [self saveVideoToAlbum:outputFilePath]; 
    } 
    ]; 

    ///// THAT IS IT DONE... the final video file will be written here... 
    NSLog(@"DONE.....outputFilePath--->%@", outputFilePath); 

    // the final video file will be located somewhere like here: 
    // /Users/caferrara/Library/Application Support/iPhone Simulator/6.0/Applications/D4B12FEE-E09C-4B12-B772-7F1BD6011BE1/Documents/outputFile.mov 


    //////////////////////////////////////////////////////////////////////////// 
    //////////////////////////////////////////////////////////////////////////// 
} 
// Copies the movie at `path` into the Saved Photos album, but only when the
// file format is one the album can accept; otherwise does nothing.
- (void) saveVideoToAlbum:(NSString*)path {

    NSLog(@"saveVideoToAlbum");

    if (!UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)) {
        return;
    }
    UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}

// Completion callback for UISaveVideoAtPathToSavedPhotosAlbum: logs the
// error when saving failed, or a short OK marker on success.
-(void) video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error == nil) {
        NSLog(@" OK");
        return;
    }
    NSLog(@"error: %@", error);
}



//////////////////////// 
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image { 

    CGSize size = CGSizeMake(400, 200); 

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: 
          [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
          [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, 
          nil]; 
    CVPixelBufferRef pxbuffer = NULL; 

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, 
              size.width, 
              size.height, 
              kCVPixelFormatType_32ARGB, 
              (__bridge CFDictionaryRef) options, 
              &pxbuffer); 
    if (status != kCVReturnSuccess){ 
     NSLog(@"Failed to create pixel buffer"); 
    } 

    CVPixelBufferLockBaseAddress(pxbuffer, 0); 
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, 
               size.height, 8, 4*size.width, rgbColorSpace, 
               kCGImageAlphaPremultipliedFirst); 
    //kCGImageAlphaNoneSkipFirst); 
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0)); 
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
              CGImageGetHeight(image)), image); 
    CGColorSpaceRelease(rgbColorSpace); 
    CGContextRelease(context); 

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 

    return pxbuffer; 
} 
1

Sử dụng mã dưới đây

 

// Builds test_output.mov in Documents from the images in arrImageDataDict
// (one frame per image, each shown for appDelegate.delaySecond seconds),
// then muxes it with the user's saved song (mySong.m4a, if set) into
// Slideshow_video.mov and saves the result to the photo library.
// Manual-reference-counting (non-ARC) code, reformatted from the original
// collapsed source; behavior is unchanged.
- (void)creatingVideo {
    // Get full path of the video file in the documents directory.
    NSError *error = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [self applicationDocumentsDirectory];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mov"];
    // Get rid of any existing movie (AVAssetWriter cannot overwrite).
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file it does not exits on path");

    // Size of the video frame.
    CGSize imageSize = CGSizeMake(640, 480);
    //CGSize imageSize = CGSizeMake(1280, 720);
    // Frames per second (timescale).
    NSUInteger fps = 30;
    NSLog(@"Start building video from defined frames.");

    // AVAssetWriter to create a movie out of still images.
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                               outputSettings:videoSettings] retain];
    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
                                      nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                         sourcePixelBufferAttributes:bufferAttributes];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    double frameDuration;
    double numberOfSecondsPerFrame = appDelegate.delaySecond;
    NSLog(@"**************************video creation started********************************");
    for (int i = 0; i < [self.arrImageDataDict count]; i++) {
        // Autorelease pool per frame keeps the UIImage temporaries bounded.
        @autoreleasepool {
            UIImage *img1 = nil;
            img1 = [self getImageForVideoCreation:i];
            buffer = [self pixelBufferFromCGImage:[img1 CGImage]];
            if (buffer == NULL) {
                NSLog(@"Pixel buffer not created");
            } else {
                // Retry for up to ~2s while the writer input catches up.
                BOOL append_ok = NO;
                int j = 0;
                while (!append_ok && j < 20) {
                    if (adaptor.assetWriterInput.readyForMoreMediaData) {
                        // Print out status:
                        NSLog(@"Processing video frame (%d,%d) delay %f", frameCount, [self.arrImageDataDict count], numberOfSecondsPerFrame);
                        frameDuration = fps * numberOfSecondsPerFrame;
                        CMTime frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
                        append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                        if (!append_ok) {
                            NSError *error = videoWriter.error;
                            if (error != nil) {
                                NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                            }
                        }
                    } else {
                        printf("adaptor not ready %d, %d\n", frameCount, j);
                        [NSThread sleepForTimeInterval:0.1];
                    }
                    j++;
                }
                if (!append_ok) {
                    printf("error appending image %d times %d\n, with error.", frameCount, j);
                }
                frameCount++;
                CVPixelBufferRelease(buffer);
                buffer = nil;
            }
        }
    }

    // Finish the session:
    [videoWriterInput markAsFinished];
    // finishWriting is deprecated/synchronous before iOS 6; pick per OS version.
    float version = [[[UIDevice currentDevice] systemVersion] floatValue];
    if (version < 6.0) {
        [videoWriter finishWriting];
        //NSLog (@"finished writing iOS version:%f",version);
    } else {
        [videoWriter finishWritingWithCompletionHandler:^(){
            //NSLog (@"finished writing iOS version:%f",version);
        }];
    }
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [videoWriterInput release];

    // OK, now add an audio file to the movie file.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Get the saved audio song path to merge into the video.
    NSURL *audio_inputFileUrl;
    NSString *filePath = [self applicationDocumentsDirectory];
    NSString *outputFilePath1 = [filePath stringByAppendingPathComponent:@"mySong.m4a"];
    audio_inputFileUrl = [[NSURL alloc] initFileURLWithPath:outputFilePath1];
    // This is the video file that was just written above.
    NSURL *video_inputFileUrl = [[NSURL alloc] initFileURLWithPath:videoOutputPath];
    [NSThread sleepForTimeInterval:2.0];
    // Create the final video output file as MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"Slideshow_video.mov"];
    NSURL *outputFileUrl = [[NSURL alloc] initFileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    // AVURLAsset: get the video (without audio) into the composition.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange
                                     ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                      atTime:kCMTimeZero
                                       error:nil];
    [videoAsset release];
    [NSThread sleepForTimeInterval:3.0];

    // If an audio song was chosen, merge its track as well.
    if (![self.appDelegate.musicFilePath isEqualToString:@"Not set"]) {
        // *************************make sure all exception is off***********************
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        AVMutableCompositionTrack *b_compositionAudioTrack =
            [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        if (![audioAsset tracksWithMediaType:AVMediaTypeAudio].count == 0) {
            [b_compositionAudioTrack insertTimeRange:audio_timeRange
                                             ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                              atTime:kCMTimeZero
                                               error:nil];
        }
        [audioAsset release];
    }

    // Cleanup, in both success and fail cases.
    [audio_inputFileUrl release];
    [video_inputFileUrl release];
    [NSThread sleepForTimeInterval:0.1];

    // AVAssetExportSession to export the combined video.
    AVAssetExportSession *_assetExport =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (_assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
#if !TARGET_IPHONE_SIMULATOR
                [self writeVideoToPhotoLibrary:outputFileUrl];
#endif
                [self RemoveSlideshowImagesInTemp];
                [self removeAudioFileFromDocumentsdirectory:outputFilePath1];
                [self removeAudioFileFromDocumentsdirectory:videoOutputPath];
                [outputFileUrl release];
                [_assetExport release];
                //NSLog(@"AVAssetExportSessionStatusCompleted");
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (alrtCreatingVideo && alrtCreatingVideo.visible) {
                        [alrtCreatingVideo dismissWithClickedButtonIndex:alrtCreatingVideo.firstOtherButtonIndex animated:YES];
                        [databaseObj isVideoCreated:appDelegate.pro_id];
                        [self performSelector:@selector(successAlertView) withObject:nil afterDelay:0.0];
                    }
                });
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Failed:%@", _assetExport.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Canceled:%@", _assetExport.error);
                break;
            default:
                break;
        }
    }];
}

// writeVideoToPhotoLibrary: saves the exported movie into the device's
// asset library; logs on failure.
- (void)writeVideoToPhotoLibrary:(NSURL *)url {
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:url
                                completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Video could not be saved");
        }
    }];
    [library release];
}
Các vấn đề liên quan