
Record video with AVCaptureSession, add a CIFilter, and save it to the photo album

I want to build a custom video recorder in my app. For now I can record the video and save it, but I want to apply filters to the video while recording and save the filtered video to the photo album. This is my code for recording the video and saving it:

let captureSession = AVCaptureSession()
let fileOutput = AVCaptureMovieFileOutput()

func initVideoRecording() {

    do {
        // The Record category is enough for capture; use PlayAndRecord if the
        // app also needs audio playback.
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }

    // Configure the captureSession property rather than a new local session,
    // so the session is owned by the view controller and not kept alive only
    // by the preview layer.
    captureSession.beginConfiguration()
    captureSession.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoLayer.frame = myImage.bounds
    myImage.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)

        captureSession.addInput(input)
        captureSession.addInput(audioInput)
    } catch {
        print("can't access camera")
        return
    }

    captureSession.addOutput(fileOutput)
    captureSession.commitConfiguration()
    captureSession.startRunning()
}

@IBAction func recordFunc() {
    if fileOutput.recording {
        myButton.setTitle("record", forState: .Normal)
        fileOutput.stopRecording()
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.mp4")
        fileOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)
        myButton.setTitle("stop", forState: .Normal)
    }
}

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    // Save the recorded video to the photos album.
    UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, self, "video:didFinishSavingWithError:contextInfo:", nil)
}

I tried to use AVCaptureVideoDataOutput instead, and in its delegate I use this code:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    connection.videoOrientation = AVCaptureVideoOrientation.Portrait

    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

    let comicEffect = CIFilter(name: "CIComicEffect")
    comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

    let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)

    // UIKit must only be touched on the main thread.
    dispatch_async(dispatch_get_main_queue()) {
        self.myImage.image = filteredImage
    }
}

With this code I only display the filter; nothing gets recorded. AVCaptureVideoDataOutput just hands me sample buffers, so nothing writes the filtered frames to disk.

======================= This is the solution to my question =======================
Please note that this code uses Swift 2 and Xcode 7.3.

let captureSession = AVCaptureSession()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()

var adapter: AVAssetWriterInputPixelBufferAdaptor!
var record = false
var videoWriter: AVAssetWriter!
var writerInput: AVAssetWriterInput!
var audioWriterInput: AVAssetWriterInput!
var lastPath = ""
var starTime = kCMTimeZero

// Note: UIScreen bounds are in points, not pixels, so the written video
// matches the screen's point size rather than its native resolution.
var outputSize = CGSizeMake(UIScreen.mainScreen().bounds.width, UIScreen.mainScreen().bounds.height)

override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    video()
}

func video() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }

    captureSession.beginConfiguration()
    captureSession.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    //videoLayer.frame = myImage.bounds
    //myImage.layer.addSublayer(videoLayer)
    view.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)

        captureSession.addInput(input)
        captureSession.addInput(audioInput)
    } catch {
        print("can't access camera")
        return
    }

    // One serial queue for both the video and the audio output, so the
    // delegate callbacks arrive in order.
    let queue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)
    videoOutput.setSampleBufferDelegate(self, queue: queue)
    audioOutput.setSampleBufferDelegate(self, queue: queue)

    captureSession.addOutput(videoOutput)
    captureSession.addOutput(audioOutput)
    captureSession.commitConfiguration()
    captureSession.startRunning()
}


@IBAction func recordFunc() {
    if record {
        myButton.setTitle("record", forState: .Normal)
        record = false
        self.writerInput.markAsFinished()
        audioWriterInput.markAsFinished()
        self.videoWriter.finishWritingWithCompletionHandler { () -> Void in
            print("FINISHED!!!!!")
            UISaveVideoAtPathToSavedPhotosAlbum(self.lastPath, self, "video:didFinishSavingWithError:contextInfo:", nil)
        }
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.MP4")
        lastPath = fileUrl.path!
        videoWriter = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)

        let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(outputSize.width)), AVVideoHeightKey : NSNumber(float: Float(outputSize.height))]

        writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        writerInput.expectsMediaDataInRealTime = true
        audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: DejalActivityView.getAudioDictionary() as? [String:AnyObject])

        videoWriter.addInput(writerInput)
        videoWriter.addInput(audioWriterInput)

        adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: DejalActivityView.getAdapterDictionary() as? [String:AnyObject])

        // starTime is updated with each sample buffer's presentation time in
        // the capture delegate, so the session starts at the current capture time.
        videoWriter.startWriting()
        videoWriter.startSessionAtSourceTime(starTime)

        record = true
        myButton.setTitle("stop", forState: .Normal)
    }
}

func getCurrentDate() -> String {
    let format = NSDateFormatter()
    format.dateFormat = "dd-MM-yyyy hh:mm:ss"
    format.locale = NSLocale(localeIdentifier: "en")
    return format.stringFromDate(NSDate())
}


extension newCustomCameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        starTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        if captureOutput == videoOutput {
            connection.videoOrientation = AVCaptureVideoOrientation.Portrait

            let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
            let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

            let comicEffect = CIFilter(name: "CIHexagonalPixellate")
            comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

            let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
            //let filteredImage = UIImage(CIImage: cameraImage)

            if self.record == true {
                // Note: creating a fresh serial queue here means this
                // dispatch_sync simply runs inline; a stored queue shared
                // across frames would be the more usual pattern.
                dispatch_sync(dispatch_queue_create("sample buffer append", DISPATCH_QUEUE_SERIAL), {
                    if self.record == true {
                        if self.writerInput.readyForMoreMediaData {
                            // The Objective-C helper takes the image *and* a target
                            // size (see its signature below) and returns an
                            // Unmanaged CVPixelBuffer.
                            let cgImage = self.convertCIImageToCGImage(comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
                            let buffer = DejalActivityView.pixelBufferFromCGImage(cgImage, size: self.outputSize).takeRetainedValue()
                            let bo = self.adapter.appendPixelBuffer(buffer, withPresentationTime: self.starTime)
                            print("video is \(bo)")
                        }
                    }
                })
            }
            dispatch_async(dispatch_get_main_queue()) {
                self.myImage.image = filteredImage
            }
        } else if captureOutput == audioOutput {
            if self.record == true {
                let bo = audioWriterInput.appendSampleBuffer(sampleBuffer)
                print("audio is \(bo)")
            }
        }
    }

    func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
        let context: CIContext? = CIContext(options: nil)
        if context != nil {
            return context!.createCGImage(inputImage, fromRect: inputImage.extent)
        }
        return nil
    }

    func video(videoPath: NSString, didFinishSavingWithError error: NSError?, contextInfo info: AnyObject) {
        var title = "Success"
        var message = "Video was saved"

        if error != nil {
            title = "Error"
            message = "Video failed to save"
        }

        let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Cancel, handler: nil))
        presentViewController(alert, animated: true, completion: nil)
    }
}
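One performance note on the code above (my suggestion, not part of the original post): convertCIImageToCGImage builds a new CIContext on every frame, which is expensive. A single stored context can be reused across frames, along these lines:

// A minimal sketch: create the context once and reuse it for every frame.
let filterContext = CIContext(options: nil)

func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
    return filterContext.createCGImage(inputImage, fromRect: inputImage.extent)
}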

These methods live in DejalActivityView and are written in Objective-C; I couldn't convert them to Swift, so if anyone can convert them, please edit my code. A rough Swift sketch follows the Objective-C listing below.

+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size 
{ 
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: 
          [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
          [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil]; 
    CVPixelBufferRef pxbuffer = NULL; 
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options, &pxbuffer); 
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer); 

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

    CVPixelBufferLockBaseAddress(pxbuffer, 0); 
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 
    NSParameterAssert(pxdata != NULL); 

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst); 
    NSParameterAssert(context); 

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image); 

    CGColorSpaceRelease(rgbColorSpace); 
    CGContextRelease(context); 

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 

    return pxbuffer; 
} 

+(NSDictionary *)getAdapterDictionary{ 


    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: 
                  [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil]; 

    return sourcePixelBufferAttributesDictionary; 
} 

+(NSDictionary *) getAudioDictionary{ 
    AudioChannelLayout acl; 
    bzero(&acl, sizeof(acl)); 
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; 


    NSDictionary* audioOutputSettings = nil; 
    audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys: 
          [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey, 
          //[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey, 
          [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey, 
          [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey, 
          [ NSData dataWithBytes: &acl length: sizeof(acl) ], AVChannelLayoutKey, 
          nil ]; 
// NSDictionary* audioOutputSettings = nil; 
//  audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys: 
//        [ NSNumber numberWithInt: kAudioFormatMPEG4AAC_HE_V2 ], AVFormatIDKey, 
//        [ NSNumber numberWithFloat: 44100.0], AVSampleRateKey, 
//        [ NSData dataWithBytes: &acl length: sizeof(acl) ], AVChannelLayoutKey, 
//        nil ]; 

    return audioOutputSettings; 
} 
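For anyone who needs these in Swift, here is a rough Swift 2 / Xcode 7.3 conversion, an untested sketch of the Objective-C above written as free functions (so the DejalActivityView. prefix at the call sites would be dropped; the pixel buffer is also returned as an ordinary optional, so takeRetainedValue() would go away at the call site):

// Assumes import AVFoundation / CoreVideo / CoreMedia (Swift 2 syntax).
func pixelBufferFromCGImage(image: CGImage, size: CGSize) -> CVPixelBuffer? {
    let options = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
    ]

    var pxbuffer: CVPixelBuffer? = nil
    let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(size.width), Int(size.height),
                                     kCVPixelFormatType_32ARGB, options, &pxbuffer)
    guard let buffer = pxbuffer where status == kCVReturnSuccess else { return nil }

    CVPixelBufferLockBaseAddress(buffer, 0)
    let pxdata = CVPixelBufferGetBaseAddress(buffer)

    // 32ARGB: 8 bits per component, 4 bytes per pixel.
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    guard let context = CGBitmapContextCreate(pxdata, Int(size.width), Int(size.height), 8,
                                              4 * Int(size.width), rgbColorSpace,
                                              CGImageAlphaInfo.PremultipliedFirst.rawValue) else {
        CVPixelBufferUnlockBaseAddress(buffer, 0)
        return nil
    }

    CGContextDrawImage(context, CGRectMake(0, 0, CGFloat(CGImageGetWidth(image)), CGFloat(CGImageGetHeight(image))), image)
    CVPixelBufferUnlockBaseAddress(buffer, 0)
    return buffer
}

func getAdapterDictionary() -> [String: AnyObject] {
    return [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(unsignedInt: kCVPixelFormatType_32ARGB)]
}

func getAudioDictionary() -> [String: AnyObject] {
    // Mono AAC at 44.1 kHz, matching the Objective-C version.
    var acl = AudioChannelLayout()
    memset(&acl, 0, sizeof(AudioChannelLayout))
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono

    return [
        AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
        AVSampleRateKey: 44100.0,
        AVNumberOfChannelsKey: 1,
        AVChannelLayoutKey: NSData(bytes: &acl, length: sizeof(AudioChannelLayout))
    ]
}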
If I use this method, the duration of the video that gets saved is reported in hours. –

Answer

You need to add an AVAssetWriter:

var videoRecorder: AVAssetWriter? 

Then, in your delegate callback:

let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

if videoRecorder?.status == .Unknown {
    // startRecordingTime is assumed to be a stored CMTime property.
    startRecordingTime = timeStamp
    videoRecorder?.startWriting()
    videoRecorder?.startSessionAtSourceTime(timeStamp)
}

You will need to configure the recorder for each recording you want to make, and you will also have to add the inputs to the recorder.

You may run into problems, since it doesn't look like you have set up the queues you'll need yet; for reference, this GitHub repo is a great resource for that (a short sketch of the queue setup follows the link):

https://github.com/waleedka/rosywriterswift
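For illustration, a minimal sketch of the queue setup this refers to, using the asker's videoOutput/audioOutput (the full solution above uses the same pattern):

// One serial queue shared by both outputs keeps video and audio callbacks ordered.
let sampleQueue = dispatch_queue_create("sample.buffer.queue", DISPATCH_QUEUE_SERIAL)
videoOutput.setSampleBufferDelegate(self, queue: sampleQueue)
audioOutput.setSampleBufferDelegate(self, queue: sampleQueue)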

EDIT: More information

You have to init() the writer and then add AVAssetWriterInput inputs for video/audio, roughly as sketched below.
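A minimal sketch of that setup (my names and settings, assuming Swift 2, H.264 video, and mono AAC audio; not the asker's exact configuration):

let url = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("out.mp4")
let writer = try! AVAssetWriter(URL: url, fileType: AVFileTypeMPEG4)

// Video input: H.264 at a fixed size (pick your capture dimensions).
let videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: 720,
    AVVideoHeightKey: 1280
])
videoInput.expectsMediaDataInRealTime = true

// Audio input: mono AAC at 44.1 kHz.
let audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: [
    AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
    AVSampleRateKey: 44100,
    AVNumberOfChannelsKey: 1
])
audioInput.expectsMediaDataInRealTime = true

writer.addInput(videoInput)
writer.addInput(audioInput)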

thanks for your answer, but I can't figure out how I could use AVAssetWriter in my code –

it isn't something simple, but you clearly got the code you have so far, so you should be able to work out the rest from the link – SeanLintern88

your answer was very helpful to me. I did some research on Google and Stack Overflow but didn't understand anything from that link [link](https://github.com/waleedka/rosywriterswift); still, your answer was very helpful, so I'll mark it as accepted and post the complete solution I found in the main question. –