Tôi muốn tạo trình ghi video tùy chỉnh trong ứng dụng của mình. Hiện tại tôi có thể quay và lưu video, nhưng tôi muốn áp dụng bộ lọc lên video trong lúc ghi và lưu video đã lọc vào album ảnh. Đây là mã của tôi để quay video và lưu nó: quay video bằng AVCaptureSession, thêm CIFilter vào video rồi lưu vào album ảnh.
// Shared capture session that carries the camera and microphone input.
let captureSession = AVCaptureSession()
// Movie-file output: writes the recorded clip straight to disk
// (no access to individual frames, so no per-frame filtering).
let fileOutput = AVCaptureMovieFileOutput()
/// Configures the shared capture session (camera + microphone), attaches a
/// preview layer to `myImage`, adds the movie-file output, and starts running.
func initVideoRecording() {
    do {
        // Record-only category: other app audio is silenced while active.
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        // Include the underlying error instead of a generic message.
        print("error in audio: \(error)")
    }
    // BUG FIX: the original created a brand-new local AVCaptureSession here,
    // shadowing the stored `captureSession` property — the property was never
    // configured, so the running session could not be reached from anywhere
    // else in the class. Configure the shared property instead.
    captureSession.beginConfiguration()
    captureSession.sessionPreset = AVCaptureSessionPresetMedium
    let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoLayer.frame = myImage.bounds
    myImage.layer.addSublayer(videoLayer)
    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)
        captureSession.addInput(input)
        captureSession.addInput(audioInput)
    } catch {
        print("can't access camera: \(error)")
        return
    }
    captureSession.addOutput(fileOutput)
    captureSession.commitConfiguration()
    captureSession.startRunning()
}
/// Toggles movie-file recording: stops the current clip when one is in
/// progress, otherwise starts a new clip in the temporary directory.
@IBAction func recordFunc() {
    guard fileOutput.recording else {
        // Not recording yet: build a unique destination and start.
        let name = "\(getCurrentDate())-capturedvideo.mp4"
        let destination = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent(name)
        fileOutput.startRecordingToOutputFileURL(destination, recordingDelegate: self)
        myButton.setTitle("stop", forState: .Normal)
        return
    }
    // Already recording: flip the button back and finish the file.
    myButton.setTitle("record", forState: .Normal)
    fileOutput.stopRecording()
}
/// AVCaptureFileOutputRecordingDelegate callback: fires once the movie file
/// has been finalized on disk; hands the clip to the photo library.
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    // BUG FIX: the original ignored `error` and force-unwrapped the path,
    // so a failed recording could push a broken file to the photo album.
    if let recordingError = error {
        print("recording failed: \(recordingError)")
        return
    }
    guard let path = outputFileURL.path where UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path) else {
        print("recorded video is not compatible with the saved photos album")
        return
    }
    // Completion is delivered to video:didFinishSavingWithError:contextInfo:.
    UISaveVideoAtPathToSavedPhotosAlbum(path, self, "video:didFinishSavingWithError:contextInfo:", nil)
}
Tôi đã thử sử dụng AVCaptureVideoDataOutput,
và trong delegate của nó tôi dùng đoạn mã sau:
/// Per-frame delegate: runs CIComicEffect over each captured video frame and
/// shows the result in `myImage`. Preview only — nothing is written to disk.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    connection.videoOrientation = AVCaptureVideoOrientation.Portrait
    let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let frame = CIImage(CVPixelBuffer: imageBuffer!)
    let effect = CIFilter(name: "CIComicEffect")
    effect!.setValue(frame, forKey: kCIInputImageKey)
    let rendered = UIImage(CIImage: effect!.valueForKey(kCIOutputImageKey) as! CIImage!)
    // Hop back to the main queue before touching UIKit.
    dispatch_async(dispatch_get_main_queue()) {
        self.myImage.image = rendered
    }
}
Với mã này, bộ lọc chỉ được hiển thị trên màn hình xem trước chứ không được ghi lại vào video.
=============== Đây là giải pháp cho câu hỏi của tôi =============== Xin lưu ý rằng mã này sử dụng Swift 2 và Xcode 7.3.
// Capture pipeline: one session feeding separate video and audio data outputs
// so each frame/sample is available for filtering before being written.
let captureSession = AVCaptureSession()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
// Adaptor used to append filtered CVPixelBuffers to the video writer input.
var adapter:AVAssetWriterInputPixelBufferAdaptor!
// True while the asset writer is accepting frames (toggled by recordFunc).
var record = false
// Writer objects are rebuilt for every new recording in recordFunc.
var videoWriter:AVAssetWriter!
var writerInput:AVAssetWriterInput!
var audioWriterInput:AVAssetWriterInput!
// Path of the most recently written file, used when saving to the album.
var lastPath = ""
// Most recent capture presentation timestamp (name is a typo for "startTime");
// updated on every sample-buffer callback and used as the writer session start.
var starTime = kCMTimeZero
// Output dimensions for the written video; defaults to the screen size in points.
var outputSize = CGSizeMake(UIScreen.mainScreen().bounds.width, UIScreen.mainScreen().bounds.height)
/// Kicks off capture once the view is on screen.
override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    // BUG FIX: viewDidAppear can fire more than once (e.g. after another
    // screen is dismissed). Re-running video() would add duplicate inputs
    // and outputs to the already-configured session, which raises an
    // exception. Only configure when the session has no inputs yet.
    guard captureSession.inputs.isEmpty else { return }
    video()
}
/// One-time capture pipeline setup: audio session category, preview layer,
/// camera/microphone inputs, and the two sample-buffer outputs.
func video() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }
    captureSession.beginConfiguration()
    captureSession.sessionPreset = AVCaptureSessionPresetMedium
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    // NOTE(review): no frame is ever assigned to this layer, so the raw
    // preview stays invisible; the filtered frames pushed into myImage by
    // the sample-buffer delegate act as the visible preview instead.
    view.layer.addSublayer(previewLayer)
    let camera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let microphone = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let cameraInput = try AVCaptureDeviceInput(device: camera)
        let microphoneInput = try AVCaptureDeviceInput(device: microphone)
        captureSession.addInput(cameraInput)
        captureSession.addInput(microphoneInput)
    } catch {
        print("can't access camera")
        return
    }
    // A single serial queue keeps the video and audio callbacks ordered
    // with respect to each other.
    let callbackQueue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)
    videoOutput.setSampleBufferDelegate(self, queue: callbackQueue)
    audioOutput.setSampleBufferDelegate(self, queue: callbackQueue)
    captureSession.addOutput(videoOutput)
    captureSession.addOutput(audioOutput)
    captureSession.commitConfiguration()
    captureSession.startRunning()
}
/// Toggles AVAssetWriter-based recording of the filtered frames.
/// Stopping finalizes the file and saves it to the photo album; starting
/// rebuilds the writer, its inputs, and the pixel-buffer adaptor.
@IBAction func recordFunc() {
    if record {
        myButton.setTitle("record", forState: .Normal)
        record = false
        // Mark both inputs finished before finalizing the container.
        self.writerInput.markAsFinished()
        audioWriterInput.markAsFinished()
        self.videoWriter.finishWritingWithCompletionHandler { () -> Void in
            print("FINISHED!!!!!")
            // Completion is delivered to video:didFinishSavingWithError:contextInfo:.
            UISaveVideoAtPathToSavedPhotosAlbum(self.lastPath, self, "video:didFinishSavingWithError:contextInfo:", nil)
        }
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.MP4")
        lastPath = fileUrl.path!
        // BUG FIX: the original used `try?`, so a failed writer creation left
        // `videoWriter` nil and crashed later on the implicitly-unwrapped
        // property. Report the error and stay in the "not recording" state.
        do {
            videoWriter = try AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)
        } catch {
            print("could not create asset writer: \(error)")
            return
        }
        let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(outputSize.width)), AVVideoHeightKey : NSNumber(float: Float(outputSize.height))]
        writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        // Real-time source: the writer must never stall the capture callbacks.
        writerInput.expectsMediaDataInRealTime = true
        audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: DejalActivityView.getAudioDictionary() as? [String:AnyObject])
        videoWriter.addInput(writerInput)
        videoWriter.addInput(audioWriterInput)
        adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: DejalActivityView.getAdapterDictionary() as? [String:AnyObject])
        videoWriter.startWriting()
        // NOTE(review): `starTime` is the last timestamp seen by the capture
        // delegate. If no frame has arrived yet it is still kCMTimeZero while
        // appended buffers carry host-clock timestamps — that mismatch is the
        // likely cause of the multi-hour durations reported below. Consider
        // starting the session from the first appended buffer's timestamp.
        videoWriter.startSessionAtSourceTime(starTime)
        record = true
        myButton.setTitle("stop", forState: .Normal)
    }
}
/// Timestamp string used to build unique capture file names,
/// e.g. "25-03-2016 14:07:09".
func getCurrentDate()->String{
    let format = NSDateFormatter()
    // BUG FIX: "hh" is the 12-hour clock, so a morning and an evening
    // recording could collide on the same file name. "HH" (00-23) keeps
    // names unique across the whole day.
    format.dateFormat = "dd-MM-yyyy HH:mm:ss"
    // Fixed-format dates should use the POSIX locale so the user's 12/24-hour
    // override or calendar settings cannot change the output.
    format.locale = NSLocale(localeIdentifier: "en_US_POSIX")
    let date = format.stringFromDate(NSDate())
    return date
}
extension newCustomCameraViewController:AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate{
/// Shared sample-buffer callback for both the video and the audio data
/// output. Video frames are filtered with CIHexagonalPixellate, previewed in
/// `myImage`, and — while `record` is true — appended to the asset writer via
/// the pixel-buffer adaptor; audio buffers are appended directly.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
// Remember the latest capture timestamp; recordFunc reads it as the writer
// session start, and the append below reuses it as the presentation time.
starTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
if captureOutput == videoOutput {
connection.videoOrientation = AVCaptureVideoOrientation.Portrait
// Wrap the raw camera frame in a CIImage and run the filter over it.
let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)
let comicEffect = CIFilter(name: "CIHexagonalPixellate")
comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)
let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage!)
//let filteredImage = UIImage(CIImage: cameraImage)
if self.record == true{
// NOTE(review): a brand-new serial queue is created on every frame, so this
// dispatch_sync serializes nothing across frames — it only blocks the
// capture callback while one frame is appended. A single stored queue
// would be needed for real serialization.
dispatch_sync(dispatch_queue_create("sample buffer append", DISPATCH_QUEUE_SERIAL), {
if self.record == true{
if self.writerInput.readyForMoreMediaData {
// Re-render the filter output to CGImage, convert to a CVPixelBuffer
// (Objective-C helper, +1 retained, hence takeRetainedValue), and append
// with the timestamp captured above. NOTE(review): self.starTime may have
// been overwritten by a newer callback by the time this runs — TODO confirm
// whether the PTS should be captured into a local before this closure.
let bo = self.adapter.appendPixelBuffer(DejalActivityView.pixelBufferFromCGImage(self.convertCIImageToCGImage(comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage!)).takeRetainedValue() as CVPixelBufferRef, withPresentationTime: self.starTime)
print("video is \(bo)")
}
}
})
}
// UIKit work belongs on the main queue.
dispatch_async(dispatch_get_main_queue())
{
self.myImage.image = filteredImage
}
}else if captureOutput == audioOutput{
if self.record == true{
// Audio samples need no filtering: append the buffer as-is.
let bo = audioWriterInput.appendSampleBuffer(sampleBuffer)
print("audio is \(bo)")
}
}
}
/// Renders a CIImage into a CGImage using a throwaway CIContext.
/// Returns nil when a context cannot be created.
func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
    let renderContext: CIContext? = CIContext(options: nil)
    guard let ctx = renderContext else {
        return nil
    }
    return ctx.createCGImage(inputImage, fromRect: inputImage.extent)
}
/// Completion selector for UISaveVideoAtPathToSavedPhotosAlbum: tells the
/// user whether the clip reached the photo album.
func video(videoPath: NSString, didFinishSavingWithError error: NSError?, contextInfo info: AnyObject) {
    var title = "Success"
    var message = "Video was saved"
    // Only the presence of an error matters here; the original bound a
    // `saveError` constant it never used (unused-variable warning).
    if error != nil {
        title = "Error"
        message = "Video failed to save"
    }
    let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
    alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Cancel, handler: nil))
    presentViewController(alert, animated: true, completion: nil)
}
Những phương thức trong DejalActivityView được viết bằng Objective-C và tôi chưa chuyển được chúng sang Swift, vì vậy nếu ai có thể chuyển đổi, xin vui lòng chỉnh sửa mã của tôi và chuyển đổi phần đó.
// Renders a CGImage into a newly created 32-bit ARGB CVPixelBuffer of the
// given size. Returns a +1 (caller-released) buffer, or NULL on failure.
// BUG FIX: the original relied on NSParameterAssert for all failure paths;
// those asserts are compiled out of release builds, allowing a NULL buffer
// or context to be dereferenced. It also assumed a row stride of
// 4 * size.width, but Core Video may pad rows — use the buffer's actual
// bytes-per-row to avoid skewed output.
+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = @{ (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
                               (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES };
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    if (pxdata == NULL) {
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CVPixelBufferRelease(pxbuffer);
        return NULL;
    }
    // Use the buffer's real (possibly padded) stride, not 4 * width.
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 bytesPerRow, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(rgbColorSpace);
    if (context == NULL) {
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CVPixelBufferRelease(pxbuffer);
        return NULL;
    }
    // Draw at the image's own dimensions, as the original did.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
// Pixel-buffer attributes for the AVAssetWriterInputPixelBufferAdaptor:
// frames are supplied in 32-bit ARGB.
+ (NSDictionary *)getAdapterDictionary
{
    return @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB) };
}
// AVAssetWriterInput audio settings: mono AAC at 44.1 kHz with an explicit
// mono channel layout.
+ (NSDictionary *)getAudioDictionary
{
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    return @{ AVFormatIDKey : @(kAudioFormatMPEG4AAC),
              AVSampleRateKey : [NSNumber numberWithFloat:44100.0],
              AVNumberOfChannelsKey : @1,
              AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(acl)] };
}
Nếu tôi sử dụng cách tiếp cận này thì thời lượng của video được lưu lại dài tới hàng giờ.