
I am trying to implement functionality like the following: recording video programmatically with the iPhone SDK.

Final recorded video = "a video captured from the front camera + the audio of a video file (which I am playing back through a video player)".

See the screenshot below for more detail.

[screenshot]

I am using the code blocks below. What I end up with is a video, but without any audio.

What I am trying to achieve is: "the final recorded video should be a combination of the video captured by my front camera plus only the audio taken from the video file I am playing."

Can anyone help or guide me on how to achieve the functionality above? Any help will be appreciated.

This is my code.

"registrazione" metodo Button Click è la seguente:

-(void) startRecording 
{ 
    [self initCaptureSession]; 

    NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] 
             pathForResource:@"video" 
             ofType:@"mp4"]]; 
    [self playMovieAtURL:url]; 

    [self startVideoRecording]; 
} 

"initCaptureSession": Utilizzando questo metodo sto registrando un video utilizzando la fotocamera frontale utilizzando "AVCaptureSession"

-(void) initCaptureSession 
{ 
    NSLog(@"Setting up capture session"); 
    captureSession = [[AVCaptureSession alloc] init]; 

    NSLog(@"Adding video input"); 

    AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable ]; 

    if (VideoDevice) 
    { 
     NSError *error; 
     videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error]; 
     if (!error) 
     { 
      if ([captureSession canAddInput:videoInputDevice]) 
      { 
       [captureSession addInput:videoInputDevice]; 
      } 
      else 
      { 
       NSLog(@"Couldn't add video input"); 
      } 
     } 
     else 
     { 
      NSLog(@"Couldn't create video input"); 
     } 
    } 
    else 
    { 
     NSLog(@"Couldn't create video capture device"); 
    } 


    NSLog(@"Adding audio input"); 
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeMuxed]; 
    NSError *error = nil; 
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error]; 
    if (audioInput) 
    { 
     [captureSession addInput:audioInput]; 
    } 


    NSLog(@"Adding movie file output"); 
    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; 

    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME 

    if ([captureSession canAddOutput:movieFileOutput]) 
     [captureSession addOutput:movieFileOutput]; 

    [self CameraSetOutputProperties];   //(We call a method as it also has to be done after changing camera) 

    NSLog(@"Setting image quality"); 
    [captureSession setSessionPreset:AVCaptureSessionPresetMedium]; 
    if ([captureSession canSetSessionPreset:AVCaptureSessionPreset640x480])  //Check size based configs are supported before setting them 
     [captureSession setSessionPreset:AVCaptureSessionPreset640x480]; 

    [captureSession startRunning]; 
} 

- (void) CameraSetOutputProperties 
{ 
    AVCaptureConnection *CaptureConnection=nil; 

    NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare: @"5.0.0" options: NSNumericSearch]; 
    if (order == NSOrderedSame || order == NSOrderedDescending) { 
     // OS version >= 5.0.0 
     CaptureConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 
    } else { 
     // OS version < 5.0.0 
     CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[movieFileOutput connections]]; 

    } 

    //Set landscape (if required) 
    if ([CaptureConnection isVideoOrientationSupported]) 
    { 
     AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;// AVCaptureVideoOrientationLandscapeRight;  //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE 
     [CaptureConnection setVideoOrientation:orientation]; 
    } 

    } 
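The frontFacingCameraIfAvailable and connectionWithMediaType:fromConnections: helpers used above are not shown in the question. For completeness, here is a minimal sketch of what they typically look like (my own assumption, not the poster's code):

-(AVCaptureDevice *) frontFacingCameraIfAvailable 
{ 
    // Return the first camera whose position is "front"; fall back to the default video device. 
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) 
    { 
        if ([device position] == AVCaptureDevicePositionFront) 
            return device; 
    } 
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
} 

-(AVCaptureConnection *) connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections 
{ 
    // Pre-iOS 5 equivalent of -[AVCaptureeOutput connectionWithMediaType:], in the style of Apple's AVCam sample. 
    for (AVCaptureConnection *connection in connections) 
    { 
        for (AVCaptureInputPort *port in [connection inputPorts]) 
        { 
            if ([[port mediaType] isEqual:mediaType]) 
                return connection; 
        } 
    } 
    return nil; 
} 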

"- (void) playMovieAtURL: (NSURL *) theURL" Utilizzando questo metodo sto giocando g un video

-(void) playMovieAtURL: (NSURL*) theURL 
{ 
    player = [[MPMoviePlayerController alloc] initWithContentURL:theURL]; 
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil]; 

    player.scalingMode = MPMovieScalingModeAspectFill; 
    player.controlStyle = MPMovieControlStyleNone; 
    [player prepareToPlay]; 

    [[NSNotificationCenter defaultCenter] addObserver:self 
                                             selector:@selector(myMovieFinishedCallback:) 
                                                 name:MPMoviePlayerPlaybackDidFinishNotification 
                                               object:player]; 

    player.view.frame = CGRectMake(10, 30, 300, 200); 
    [self.view addSubview:player.view]; 

    [player play]; 
} 

"startVideoRecording" con questo metodo che ho iniziato a registrare il video finale.

- (void) startVideoRecording 
{ 
    //Create temporary URL to record to 
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"]; 
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath]; 
    NSFileManager *fileManager = [NSFileManager defaultManager]; 
    if ([fileManager fileExistsAtPath:outputPath]) 
    { 
     NSError *error; 
     if ([fileManager removeItemAtPath:outputPath error:&error] == NO) 
     { 
      //Error - handle if required 
      NSLog(@"file remove error"); 
     } 
    } 
    //Start recording 
    [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self]; 

} 

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput 
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL 
     fromConnections:(NSArray *)connections 
       error:(NSError *)error 
{ 

    NSLog(@"didFinishRecordingToOutputFileAtURL - enter"); 

    BOOL RecordedSuccessfully = YES; 
    if ([error code] != noErr) 
    { 
     // A problem occurred: Find out if the recording was successful. 
     id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey]; 
     if (value) 
     { 
      RecordedSuccessfully = [value boolValue]; 
     } 
    } 
    if (RecordedSuccessfully) 
    { 
     //----- RECORDED SUCESSFULLY ----- 
     NSLog(@"didFinishRecordingToOutputFileAtURL - success"); 
     ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; 
     if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) 
     { 
      [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL 
             completionBlock:^(NSURL *assetURL, NSError *error) 
      { 
       if (error) 
       { 
        NSLog(@"File save error"); 
       } 
       else 
       { 
        recordedVideoURL=assetURL; 
       } 
      }]; 
     } 
     else 
     { 

      NSString *assetURL=[self copyFileToDocuments:outputFileURL]; 
      if(assetURL!=nil) 
      { 
       recordedVideoURL=[NSURL URLWithString:assetURL]; 
      } 
     } 
    } 
} 
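The copyFileToDocuments: helper called above is not included in the question. A hypothetical sketch of such a method (the destination file name is my assumption) could be:

-(NSString *) copyFileToDocuments:(NSURL *)fileURL 
{ 
    // Copy the temporary recording into the app's Documents directory and return the new path. 
    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject]; 
    NSString *destinationPath = [documentsDirectory stringByAppendingPathComponent:@"output.mov"]; 
    NSError *error = nil; 

    [[NSFileManager defaultManager] removeItemAtPath:destinationPath error:nil]; 
    if (![[NSFileManager defaultManager] copyItemAtURL:fileURL 
                                                 toURL:[NSURL fileURLWithPath:destinationPath] 
                                                 error:&error]) 
    { 
        NSLog(@"Error copying video to Documents: %@", [error localizedDescription]); 
        return nil; 
    } 
    return destinationPath; 
} 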
Comments:

Check this link, it may help: http://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios –

+1 for the nice formatting :) –

Hi... can this also record a live RTSP stream? – Anny

Answers

Answer 1 (score 4)

Add some additional code to the following methods. First method:

-(void) playMovieAtURL: (NSURL*) theURL 
{ 
    [player play]; 

    AVAudioSession *audioSession = [AVAudioSession sharedInstance]; 
    NSError *err = nil; 
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&err]; 
    if (err) 
    { 
        NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]); 
        return; 
    } 

    err = nil; 
    [audioSession setActive:YES error:&err];   // check the error after activating the session 
    if (err) 
    { 
        NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]); 
        return; 
    } 

    recordSetting = [[NSMutableDictionary alloc] init]; 

    [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatAppleIMA4] forKey:AVFormatIDKey]; 
    [recordSetting setValue:[NSNumber numberWithFloat:16000.0] forKey:AVSampleRateKey]; 
    [recordSetting setValue:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey]; 

    recorderFilePath = [NSString stringWithFormat:@"%@/MySound.caf", DOCUMENTS_FOLDER]; 
    NSLog(@"recorderFilePath: %@", recorderFilePath); 
    audio_url = [NSURL fileURLWithPath:recorderFilePath]; 

    // Delete any previous audio file at this path before recording a new one. 
    err = nil; 
    NSData *audioData = [NSData dataWithContentsOfFile:[audio_url path] options:0 error:&err]; 
    if (audioData) 
    { 
        NSFileManager *fm = [NSFileManager defaultManager]; 
        [fm removeItemAtPath:[audio_url path] error:&err]; 
    } 

    err = nil; 
    recorder = [[ AVAudioRecorder alloc] initWithURL:audio_url settings:recordSetting error:&err]; 
    if(!recorder) 
    { 
     NSLog(@"recorder: %@ %d %@", [err domain], [err code], [[err userInfo] description]); 
     UIAlertView *alert = 
     [[UIAlertView alloc] initWithTitle: @"Warning" 
            message: [err localizedDescription] 
            delegate: nil 
         cancelButtonTitle:@"OK" 
         otherButtonTitles:nil]; 
     [alert show]; 
     return; 
    } 

    //prepare to record 
    [recorder setDelegate:self]; 
    [recorder prepareToRecord]; 
    recorder.meteringEnabled = YES; 

    BOOL audioHWAvailable = audioSession.inputAvailable; 
    if (!audioHWAvailable) 
    { 
        UIAlertView *cantRecordAlert = 
        [[UIAlertView alloc] initWithTitle:@"Warning" 
                                   message:@"Audio input hardware not available" 
                                  delegate:nil 
                         cancelButtonTitle:@"OK" 
                         otherButtonTitles:nil]; 
        [cantRecordAlert show]; 
        return; 
    } 

    // Start capturing audio so that stopVideoRecording has an audio file to merge. 
    [recorder record]; 
} 
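Note that recordSetting, recorderFilePath, audio_url, and recorder are presumably instance variables, and DOCUMENTS_FOLDER is presumably a convenience macro along these lines (an assumption, not part of the original answer):

// Assumed definition: path to the app's Documents directory. 
#define DOCUMENTS_FOLDER [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] 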

Second method (note that outputURL here must be the same file URL that startVideoRecording recorded to, so it should be kept in an instance variable):

-(void) stopVideoRecording 
{ 
    [player.view removeFromSuperview]; 
    [player stop]; 
    [movieFileOutput stopRecording]; 

    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_url options:nil]; 
    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:outputURL options:nil]; 

    mixComposition = [AVMutableComposition composition]; 

    AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio 
                         preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) 
             ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] 
             atTime:kCMTimeZero error:nil]; 

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo 
                        preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) 
            ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] 
            atTime:kCMTimeZero error:nil]; 

    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition 
                      presetName:AVAssetExportPresetPassthrough]; 

    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    [compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform]; 
} 
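Two caveats about stopVideoRecording. First, [movieFileOutput stopRecording] finishes asynchronously, so the movie file is only guaranteed to be complete once the didFinishRecordingToOutputFileAtURL delegate fires; in practice the merge should be kicked off from there. Second, _assetExport is created but never actually run, so nothing is written to disk and the merged result is only played back below. If you also want the merged movie saved, a minimal export step could be appended at the end of stopVideoRecording, for example (a sketch; the file name is illustrative):

    // Sketch only: write the merged composition to a file. 
    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"merged.mov"]; 
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil]; 

    _assetExport.outputFileType = AVFileTypeQuickTimeMovie; 
    _assetExport.outputURL = [NSURL fileURLWithPath:exportPath]; 
    _assetExport.shouldOptimizeForNetworkUse = YES; 

    [_assetExport exportAsynchronouslyWithCompletionHandler:^{ 
        if (_assetExport.status == AVAssetExportSessionStatusCompleted) 
            NSLog(@"Merged video exported to %@", exportPath); 
        else 
            NSLog(@"Export failed: %@", _assetExport.error); 
    }]; 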

Finally, play the merged video:

AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:mixComposition]; 
AVPlayer *player1 = [AVPlayer playerWithPlayerItem:playerItem]; 
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player1]; 
[playerLayer setFrame:CGRectMake(0, 0, 320, 480)]; 
[[[self view] layer] addSublayer:playerLayer]; 
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; 
[player1 play]; 
player1.actionAtItemEnd = AVPlayerActionAtItemEndNone; 
Comment: Can you post the whole code on GitHub, so it is easy for us to get an exact working demo? – Anny

Answer 2 (score 1)

I think this may help:

AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audioUrl options:nil]; 
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoUrl options:nil]; 

AVMutableComposition* mixComposition = [AVMutableComposition composition]; 

AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio 
                        preferredTrackID:kCMPersistentTrackID_Invalid]; 
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) 
            ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] 
            atTime:kCMTimeZero error:nil]; 

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo 
                        preferredTrackID:kCMPersistentTrackID_Invalid]; 
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) 
           ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] 
           atTime:kCMTimeZero error:nil]; 

AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition 
                     presetName:AVAssetExportPresetPassthrough]; 

NSString* videoName = @"export.mov"; 

NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName]; 
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath]; 

if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) 
{ 
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil]; 
} 

_assetExport.outputFileType = AVFileTypeQuickTimeMovie;   // equivalent to @"com.apple.quicktime-movie" 
DLog(@"file type %@", _assetExport.outputFileType);        // DLog is presumably a custom debug-logging macro; NSLog works too 
_assetExport.outputURL = exportUrl; 
_assetExport.shouldOptimizeForNetworkUse = YES; 

[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) { 
    // your completion code here 
}]; 
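What goes inside that completion handler depends on your app; at a minimum you would typically check the export status before using the file. A possible body for the block (a sketch):

    if (_assetExport.status == AVAssetExportSessionStatusCompleted) 
    { 
        // e.g. play exportUrl or save it to the photo library 
        NSLog(@"Merged movie written to %@", exportUrl); 
    } 
    else 
    { 
        NSLog(@"Export failed with status %ld: %@", (long)_assetExport.status, _assetExport.error); 
    } 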

Courtesy of: https://stackoverflow.com/a/3456565/1865424

You can also check this code for recording video from the front camera:

-(IBAction)cameraLibraryButtonClick:(id)sender{ 
    if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {    
     UIImagePickerController *videoRecorder = [[UIImagePickerController alloc]init]; 
     videoRecorder.delegate = self; 
     NSArray *sourceTypes = [UIImagePickerController availableMediaTypesForSourceType:videoRecorder.sourceType]; 
     NSLog(@"Available types for source as camera = %@", sourceTypes); 
     if (![sourceTypes containsObject:(NSString*)kUTTypeMovie]) { 
      UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil 
                                                      message:@"Device Not Supported for video Recording." 
                                                     delegate:self 
                                            cancelButtonTitle:@"Yes" 
                                            otherButtonTitles:@"No", nil]; 
      [alert show]; 
      [alert release]; 
      return; 
     } 
     videoRecorder.cameraDevice=UIImagePickerControllerCameraDeviceFront; 
     videoRecorder.sourceType = UIImagePickerControllerSourceTypeCamera; 
     videoRecorder.mediaTypes = [NSArray arrayWithObject:(NSString*)kUTTypeMovie];   
     videoRecorder.videoQuality = UIImagePickerControllerQualityTypeLow; 
     videoRecorder.videoMaximumDuration = 120; 

     self.imagePicker = videoRecorder;     
     [videoRecorder release]; 
     [self presentModalViewController:self.imagePicker animated:YES]; 
     newMedia = YES; 
    } 
    else { 
     [self displaysorceError]; 
    } 


} 
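The UIImagePickerControllerDelegate callback that actually receives the recorded movie is not shown in that answer; a minimal sketch (assuming MobileCoreServices is imported for kUTTypeMovie) might look like:

- (void)imagePickerController:(UIImagePickerController *)picker 
didFinishPickingMediaWithInfo:(NSDictionary *)info 
{ 
    NSString *mediaType = [info objectForKey:UIImagePickerControllerMediaType]; 
    if ([mediaType isEqualToString:(NSString *)kUTTypeMovie]) 
    { 
        // URL of the movie that was just recorded with the front camera. 
        NSURL *recordedMovieURL = [info objectForKey:UIImagePickerControllerMediaURL]; 
        NSLog(@"Recorded movie at %@", recordedMovieURL); 
    } 
    [self dismissModalViewControllerAnimated:YES]; 
} 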

Courtesy of: https://stackoverflow.com/a/14154289/1865424

If these don't work for you, let me know. But I think this will help you out.

Comments:

I want to implement video and audio recording within the iPhone screen, thanks –

This will help you. @MacGeek – Shivaay