
Pause and resume video recording using AVCaptureMovieFileOutput and AVCaptureVideoDataOutput in iOS

I have to implement pause and resume of video recording within a single session, with each new segment (the segments captured after every pause) appended to the same video file, using AVFoundation. Right now, every time I press "stop" and then "record" again, it simply saves a new video file into the iPhone's Documents directory and starts recording into a new file. I need to be able to press the "record/stop" button, have video and audio recorded only while recording is active, and then, when the "done" button is pressed, end up with a single AV file containing all of the segments together. And all of this needs to happen within the same capture/preview session.

I am not using AVAssetWriterInput.

The only approach I can think of is, when the "done" button is pressed, to take each individual output file and combine them into a single file.

This code works on iOS 5 but not on iOS 6. Specifically, on iOS 6, the first time I pause recording (stop recording) the AVCaptureFileOutputRecordingDelegate method (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:) is called, but when I start recording again the delegate method (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:) is called again right away, and it is then not called when recording is actually stopped.

I need a solution for this problem. Please help me.

//View LifeCycle 
- (void)viewDidLoad 
{ 
[super viewDidLoad]; 

self.finalRecordedVideoName = [self stringWithNewUUID]; 

arrVideoName = [[NSMutableArray alloc]initWithCapacity:0]; 
arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0]; 

CaptureSession = [[AVCaptureSession alloc] init]; 


captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
if ([captureDevices count] > 0) 
{ 
    NSError *error; 
    VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error]; 
    if (!error) 
    { 
     if ([CaptureSession canAddInput:VideoInputDevice]) 
      [CaptureSession addInput:VideoInputDevice]; 
     else 
      NSLog(@"Couldn't add video input"); 
    } 
    else 
    { 
     NSLog(@"Couldn't create video input"); 
    } 
} 
else 
{ 
    NSLog(@"Couldn't create video capture device"); 
} 



//ADD VIDEO PREVIEW LAYER 
NSLog(@"Adding video preview layer"); 
AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession]; 

[self setPreviewLayer:layer]; 


UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation; 

NSLog(@"%d",currentOrientation); 

if (currentOrientation == UIDeviceOrientationPortrait) 
{ 
    PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; 
} 
else if (currentOrientation == UIDeviceOrientationPortraitUpsideDown) 
{ 
    PreviewLayer.orientation = AVCaptureVideoOrientationPortraitUpsideDown; 
} 
else if (currentOrientation == UIDeviceOrientationLandscapeRight) 
{ 
    PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight; 
} 
else if (currentOrientation == UIDeviceOrientationLandscapeLeft) 
{ 
    PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeLeft; 
} 

[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 

//ADD MOVIE FILE OUTPUT 
NSLog(@"Adding movie file output"); 
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; 
VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; 
[VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()]; 
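//Delivering sample buffers on the main queue works, but a dedicated serial dispatch queue is usually preferred so that UI work does not cause dropped frames. 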

NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey; //The pixel-format value below must be paired with the pixel-format key, not kCVPixelBufferBytesPerRowAlignmentKey 
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; 
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; 

[VideoDataOutput setVideoSettings:videoSettings]; 

Float64 TotalSeconds = 60;   //Total seconds 
int32_t preferredTimeScale = 30; //Frames per second 
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);//<<SET MAX DURATION 
MovieFileOutput.maxRecordedDuration = maxDuration; 
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME 

//SET THE CONNECTION PROPERTIES (output properties) 
[self CameraSetOutputProperties];   //(done in a separate method because it also has to happen after switching cameras) 
AVCaptureConnection *videoConnection = nil; 

for (AVCaptureConnection *connection in [MovieFileOutput connections]) 
{ 
    NSLog(@"%@", connection); 
    for (AVCaptureInputPort *port in [connection inputPorts]) 
    { 
     NSLog(@"%@", port); 
     if ([[port mediaType] isEqual:AVMediaTypeVideo]) 
     { 
      videoConnection = connection; 
     } 
    } 
} 

if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false** 
{ 
    [videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]]; 
} 
NSLog(@"Setting image quality"); 
[CaptureSession setSessionPreset:AVCaptureSessionPresetLow]; 

//----- DISPLAY THE PREVIEW LAYER ----- 

CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568)?438:348); 

[self.PreviewLayer setBounds:layerRect]; 
[self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),CGRectGetMidY(layerRect))]; 

if ([CaptureSession canAddOutput:MovieFileOutput]) 
    [CaptureSession addOutput:MovieFileOutput]; 
if ([CaptureSession canAddOutput:VideoDataOutput]) //Guard the second output as well; note that on iOS a movie file output and a video data output generally cannot both receive data in the same session 
    [CaptureSession addOutput:VideoDataOutput]; 
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front): 
CameraView = [[UIView alloc] init]; 
[videoPreviewLayer addSubview:CameraView]; 
[videoPreviewLayer sendSubviewToBack:CameraView]; 
[[CameraView layer] addSublayer:PreviewLayer]; 

//----- START THE CAPTURE SESSION RUNNING ----- 
[CaptureSession startRunning]; 
} 

#pragma mark - IBACtion Methods 
-(IBAction)btnStartAndStopPressed:(id)sender 
{ 
UIButton *StartAndStopButton = (UIButton*)sender; 
if ([StartAndStopButton isSelected] == NO) 
{ 
    [StartAndStopButton setSelected:YES]; 
    [btnPauseAndResume setEnabled:YES]; 
    [btnBack setEnabled:NO]; 
    [btnSwitchCameraInput setHidden:YES]; 

    NSDate *date = [NSDate date]; 
    NSLog(@" date %@",date); 

    NSArray *paths     = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
    NSString *recordedFileName = nil; 
    recordedFileName = [NSString stringWithFormat:@"output%@.mov",date]; 
    NSString *documentsDirectory = [paths objectAtIndex:0]; 
    self.outputPath     = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",recordedFileName]]; 
    NSLog(@"%@",self.outputPath); 

    [arrVideoName addObject:recordedFileName]; 

    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath]; 
    if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath]) 
    { 
     NSError *error; 
     if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO) 
     { 
      //Error - handle if required 
     } 
    } 
    //Start recording 
    [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self]; 
    recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES]; 

} 
else 
{ 
    [StartAndStopButton setSelected:NO]; 
    [btnPauseAndResume setEnabled:NO]; 
    [btnBack setEnabled:YES]; 
    [btnSwitchCameraInput setHidden:NO]; 

    NSLog(@"STOP RECORDING"); 
    WeAreRecording = NO; 

    [MovieFileOutput stopRecording]; 
    [((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:@"Please wait...."]; 

    if ([recordingTimer isValid]) 
    { 
     [recordingTimer invalidate]; 
     recordingTimer = nil; 
     recordingTime = 30; 
    } 

    stopRecording = YES; 
} 
} 

- (IBAction)btnPauseAndResumePressed:(id)sender 
{ 
UIButton *PauseAndResumeButton = (UIButton*)sender; 
if (PauseAndResumeButton.selected == NO) 
{ 
    PauseAndResumeButton.selected = YES; 
    NSLog(@"recording paused"); 
    WeAreRecording = NO; 

    [MovieFileOutput stopRecording]; 
    [self pauseTimer:recordingTimer]; 

    [btnStartAndStop setEnabled:NO]; 
    [btnBack setEnabled:YES]; 
    [btnSwitchCameraInput setHidden:NO]; 
} 
else 
{ 
    PauseAndResumeButton.selected = NO; 
    NSLog(@"recording resumed"); 

    [btnStartAndStop setEnabled:YES]; 
    [btnBack setEnabled:NO]; 
    [btnSwitchCameraInput setHidden:YES]; 

    WeAreRecording = YES; 

    NSDate *date = [NSDate date]; 
    NSLog(@" date %@",date); 

    NSArray *paths     = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES); 
    NSString *recordedFileName = nil; 
    recordedFileName = [NSString stringWithFormat:@"output%@.mov",date]; 
    NSString *documentsDirectory = [paths objectAtIndex:0]; 
    self.outputPath     = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",recordedFileName]]; 
    NSLog(@"%@",self.outputPath); 

    [arrVideoName addObject:recordedFileName]; 

    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath]; 
    if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath]) 
    { 
     NSError *error; 
     if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO) 
     { 
      //Error - handle if required 
     } 
    } 
    [self resumeTimer:recordingTimer]; 
    //Start recording 
    [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self]; 
} 
} 

- (void) CameraSetOutputProperties 
{ 
//SET THE CONNECTION PROPERTIES (output properties) 
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 

[CaptureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait]; 
//Set frame rate (if required) 
CMTimeShow(CaptureConnection.videoMinFrameDuration); 
CMTimeShow(CaptureConnection.videoMaxFrameDuration); 

if (CaptureConnection.supportsVideoMinFrameDuration) 
    CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 
if (CaptureConnection.supportsVideoMaxFrameDuration) 
    CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 

CMTimeShow(CaptureConnection.videoMinFrameDuration); 
CMTimeShow(CaptureConnection.videoMaxFrameDuration); 
} 

- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position 
{ 
NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
for (AVCaptureDevice *Device in Devices) 
{ 
    if ([Device position] == Position) 
    { 
     NSLog(@"%d",Position); 
     return Device; 
    } 
} 
return nil; 
} 

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate and AVCaptureFileOutputRecordingDelegate Methods 

-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 

if(videoWriterInput.readyForMoreMediaData && WeAreRecording) [videoWriterInput appendSampleBuffer:sampleBuffer]; 
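//NOTE: videoWriterInput (an AVAssetWriterInput) is never created or attached to an AVAssetWriter anywhere in the code shown here, so in this snippet the append above never actually runs. 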

for(AVCaptureConnection *captureConnection in [captureOutput connections]) 
{ 
    if ([captureConnection isVideoOrientationSupported]) 
    { 
     AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft; 
     [captureConnection setVideoOrientation:orientation]; 
    } 
}  

UIDeviceOrientation curOr = [[UIDevice currentDevice] orientation]; 

CGAffineTransform t; 

if (curOr == UIDeviceOrientationPortrait) 
{ 
    t = CGAffineTransformMakeRotation(-M_PI/2); 
} 
else if (curOr == UIDeviceOrientationPortraitUpsideDown) 
{ 
    t = CGAffineTransformMakeRotation(M_PI/2); 
} 
else if (curOr == UIDeviceOrientationLandscapeRight) 
{ 
    t = CGAffineTransformMakeRotation(M_PI); 
} 
else 
{ 
    t = CGAffineTransformMakeRotation(0); 
} 
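//NOTE: the transform t computed above is never applied to anything in this method. 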
} 

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error 
{ 
NSLog(@"didFinishRecordingToOutputFileAtURL - enter"); 
NSLog(@"output file url : %@", [outputFileURL absoluteString]); 

BOOL RecordedSuccessfully = YES; 
if ([error code] != noErr) 
{ 
    // A problem occurred: Find out if the recording was successful. 
    id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey]; 
    if (value) 
    { 
     RecordedSuccessfully = [value boolValue]; 
    } 
} 
AVCaptureConnection *videoConnection=nil; 
for (AVCaptureConnection *connection in [MovieFileOutput connections]) 
{ 
    NSLog(@"%@", connection); 
    for (AVCaptureInputPort *port in [connection inputPorts]) 
    { 
     NSLog(@"%@", port); 
     if ([[port mediaType] isEqual:AVMediaTypeVideo]) 
     { 
      videoConnection = connection; 
     } 
    } 
} 

if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false** 
{ 
    [videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]]; 
} 
NSLog(@"Setting image quality"); 

NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL]; 
[videoData writeToFile:self.outputPath atomically:NO]; 
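//NOTE: dataWithContentsOfURL: loads the entire recorded segment into memory before writing it back out; for longer clips, copying the file with NSFileManager would be lighter on memory. 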

[arrOutputUrl addObject:outputFileURL]; 

if (stopRecording) 
{ 
    [self mergeMultipleVideo]; 
} 
} 

//Method to merge the recorded video segments into one composition 
-(void)mergeMultipleVideo 
{ 
mixComposition = [AVMutableComposition composition]; 

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
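//NOTE: only a video track is added to the composition here; if the segments contain audio, an AVMediaTypeAudio composition track would have to be added and filled as well for the merged file to have sound. 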

CMTime nextClipStartTime = kCMTimeZero; 
NSLog(@"Array of output file url : %@", arrOutputUrl); 
if (arrOutputUrl.count > 0) 
{ 
    for(int i = 0 ;i < [arrOutputUrl count];i++) 
    { 
     AVURLAsset* VideoAsset = [[AVURLAsset alloc]initWithURL:[arrOutputUrl objectAtIndex:i] options:nil]; 

     CMTimeRange timeRangeInAsset; 
     timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [VideoAsset duration]); 

     [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[[VideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil]; 
     nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration); 
    } 
} 

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
NSString *documentsDirectory = [paths objectAtIndex:0]; 
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.finalRecordedVideoName]]; 
NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 
exportSession.outputURL=url; 
exportSession.outputFileType = AVFileTypeQuickTimeMovie; 
exportSession.shouldOptimizeForNetworkUse = YES; 
[exportSession exportAsynchronouslyWithCompletionHandler:^{ 
    dispatch_async(dispatch_get_main_queue(), ^{ 
     [self exportDidFinish:exportSession path:myPathDocs]; 
    }); 
}]; 
} 

-(void)exportDidFinish:(AVAssetExportSession*)session path:(NSString*)outputVideoPath 
{ 
NSLog(@"session.status : %d",session.status); 
if (session.status == AVAssetExportSessionStatusCompleted) 
{ 
    NSURL *outputURL = session.outputURL; 

    NSData *videoData = [NSData dataWithContentsOfURL:outputURL]; 
    [videoData writeToFile:outputVideoPath atomically:NO]; 

    if ([arrVideoName count] > 0) 
    { 
     for (int i = 0; i < [arrVideoName count]; i++) 
     { 
      NSArray* documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
      NSString* fullFilePath = [[documentPaths objectAtIndex:0] stringByAppendingPathComponent: [NSString stringWithFormat:@"%@",[arrVideoName objectAtIndex:i]]]; 

      NSLog(@"Full path of file to be deleted: %@",fullFilePath); 

      NSFileManager *fileManager = [NSFileManager defaultManager]; 
      NSError *error; 

      if ([fileManager fileExistsAtPath:fullFilePath]) 
      { 
       [fileManager removeItemAtPath:fullFilePath error:&error]; 
      } 
     } 
     [arrVideoName removeAllObjects]; 
    } 
    if (arrOutputUrl.count > 0) 
    { 
     [arrOutputUrl removeAllObjects]; 
    } 
    [((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view]; 
    [self.view addSubview:afterRecordingPopupView]; 
} 
} 

Did you get a solution for this?

Answers


Look at the enabled property of AVCaptureConnection. For your output's connection, set enabled to NO instead of stopping the session.
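A minimal sketch of what that suggestion could look like, assuming the MovieFileOutput ivar from the question; the method names pauseSegment and resumeSegment are hypothetical:

//Sketch of the suggestion above, not the poster's actual code. 
- (void)pauseSegment 
{ 
    //Disabling the connection stops data from flowing into the file output 
    //without tearing down the capture session (the preview keeps running). 
    AVCaptureConnection *videoConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 
    videoConnection.enabled = NO; 
} 

- (void)resumeSegment 
{ 
    //Re-enabling the connection lets sample buffers reach the output again. 
    AVCaptureConnection *videoConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 
    videoConnection.enabled = YES; 
} 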


I tried that. But when I set my AVCaptureConnection's enabled property to NO, - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error gets called, which is not what I want.
