2012-08-27 10 views
11

Ho usato il codice seguente per aggiungere la sovrapposizione di immagini sul video e quindi esportare il nuovo video generato nella directory dei documenti. Ma stranamente, il video viene ruotato di 90 gradi. Titolo: AVMutableVideoComposition di un video ruotato, catturato in modalità verticale.

// Lays the clips in _clips into the given composition on two alternating
// video/audio track pairs, overlapping neighbouring clips by
// transitionDuration, and fills videoComposition with the matching
// pass-through and transition instructions.
//
// NOTE(review): the layer instructions never apply each source track's
// preferredTransform, so portrait-captured footage is exported rotated 90°.
// Apply preferredTransform in the layer instructions (or copy it onto the
// composition track) to export with the correct orientation.
- (void)buildTransitionComposition:(AVMutableComposition *)composition andVideoComposition:(AVMutableVideoComposition *)videoComposition
{
    CMTime nextClipStartTime = kCMTimeZero;
    NSInteger i;

    // Make transitionDuration no greater than half the shortest clip duration.
    CMTime transitionDuration = self.transitionDuration;
    for (i = 0; i < [_clips count]; i++) {
        NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
        if (clipTimeRange) {
            CMTime halfClipDuration = [clipTimeRange CMTimeRangeValue].duration;
            halfClipDuration.timescale *= 2; // You can halve a rational by doubling its denominator.
            transitionDuration = CMTimeMinimum(transitionDuration, halfClipDuration);
        }
    }

    // Add two video tracks and two audio tracks.
    AVMutableCompositionTrack *compositionVideoTracks[2];
    AVMutableCompositionTrack *compositionAudioTracks[2];
    compositionVideoTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionVideoTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionAudioTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionAudioTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);
    CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);

    // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
    for (i = 0; i < [_clips count]; i++) {
        NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
        AVURLAsset *asset = [_clips objectAtIndex:i];
        NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
        CMTimeRange timeRangeInAsset;
        if (clipTimeRange)
            timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
        else
            timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);

        // Insert the clip's video; surface (rather than swallow) insert failures.
        NSError *insertError = nil;
        AVAssetTrack *clipVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
        if (clipVideoTrack &&
            ![compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:&insertError]) {
            NSLog(@"Failed to insert video track for clip %ld: %@", (long)i, insertError);
        }

        // Audio is optional: a video-only clip must not crash the build.
        AVAssetTrack *clipAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
        if (clipAudioTrack &&
            ![compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:&insertError]) {
            NSLog(@"Failed to insert audio track for clip %ld: %@", (long)i, insertError);
        }

        // Remember the time range in which this clip should pass through.
        // Every clip after the first begins with a transition;
        // every clip before the last ends with a transition.
        // Exclude those transitions from the pass-through time ranges.
        passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
        if (i > 0) {
            passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
            passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
        }
        if (i + 1 < [_clips count]) {
            passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
        }

        // The end of this clip will overlap the start of the next by transitionDuration.
        // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
        nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
        nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);

        // Remember the time range for the transition to the next item.
        transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
    }

    // Set up the video composition if we are to perform crossfade or push transitions between clips.
    NSMutableArray *instructions = [NSMutableArray array];

    // Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
    for (i = 0; i < [_clips count]; i++) {
        NSInteger alternatingIndex = i % 2; // alternating targets

        // Pass through clip i.
        AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        passThroughInstruction.timeRange = passThroughTimeRanges[i];
        AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
        passThroughInstruction.layerInstructions = @[passThroughLayer];
        [instructions addObject:passThroughInstruction];

        if (i + 1 < [_clips count]) {
            // Add transition from clip i to clip i+1.
            AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            transitionInstruction.timeRange = transitionTimeRanges[i];
            AVMutableVideoCompositionLayerInstruction *fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
            AVMutableVideoCompositionLayerInstruction *toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1 - alternatingIndex]];

            if (self.transitionType == SimpleEditorTransitionTypeCrossFade) {
                // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
                [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];
            }
            else if (self.transitionType == SimpleEditorTransitionTypePush) {
                // Set a transform ramp on fromLayer from identity to all the way left of the screen.
                [fromLayer setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeTranslation(-composition.naturalSize.width, 0.0) timeRange:transitionTimeRanges[i]];
                // Set a transform ramp on toLayer from all the way right of the screen to identity.
                [toLayer setTransformRampFromStartTransform:CGAffineTransformMakeTranslation(+composition.naturalSize.width, 0.0) toEndTransform:CGAffineTransformIdentity timeRange:transitionTimeRanges[i]];
            }

            transitionInstruction.layerInstructions = @[fromLayer, toLayer];
            [instructions addObject:transitionInstruction];
        }
    }

    videoComposition.instructions = instructions;
}

Si prega di aiuto, in quanto non sono in grado di esportare il video in modalità corretta. Qualsiasi aiuto apprezzato. Grazie.

risposta

15

Per impostazione predefinita, quando si esporta un video utilizzando AVAssetExportSession, il video verrà ruotato rispetto all'orientamento originale. È necessario applicare la trasformazione preferita della traccia per impostare l'orientamento esatto. Prova il codice qui sotto per fare lo stesso.

// Returns a layer instruction for inTrack that applies the source asset's
// preferredTransform (plus a scale to fit a 320pt-wide render canvas), so a
// portrait-captured clip is not exported rotated 90°.
// The layer's opacity is set to 0 at inTime so the following clip shows through.
// NOTE(review): the 320/160 values assume a 320pt-wide render size — confirm
// against the video composition's actual renderSize.
- (AVMutableVideoCompositionLayerInstruction *)layerInstructionAfterFixingOrientationForAsset:(AVAsset *)inAsset
                        forTrack:(AVMutableCompositionTrack *)inTrack
                         atTime:(CMTime)inTime
{
    //FIXING ORIENTATION//
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:inTrack];
    AVAssetTrack *videoAssetTrack = [inAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (!videoAssetTrack) {
        return videolayerInstruction; // No video track: nothing to fix.
    }

    // A ±90° rotation in preferredTransform means the clip was captured in portrait.
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
    BOOL isVideoAssetPortrait =
        (videoTransform.a == 0 && videoTransform.d == 0 &&
         ((videoTransform.b == 1.0 && videoTransform.c == -1.0) ||
          (videoTransform.b == -1.0 && videoTransform.c == 1.0)));

    if (isVideoAssetPortrait) {
        // Portrait: after rotation the on-screen width is naturalSize.height.
        CGFloat scaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.height;
        CGAffineTransform scale = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio);
        [videolayerInstruction setTransform:CGAffineTransformConcat(videoTransform, scale) atTime:kCMTimeZero];
    } else {
        // Landscape: scale to the 320pt width and translate down 160pt to centre
        // the letterboxed frame on the canvas.
        CGFloat scaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.width;
        CGAffineTransform scale = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio);
        [videolayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, scale), CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
    }
    // Hide this layer once its clip's slot ends.
    [videolayerInstruction setOpacity:0.0 atTime:inTime];
    return videolayerInstruction;
}

Spero che questo ti possa aiutare.

AVAssetTrack *assetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 

AVMutableCompositionTrack *mutableTrack = [mergeComposition mutableTrackCompatibleWithTrack:assetTrack]; 

AVMutableVideoCompositionLayerInstruction *assetInstruction = [self layerInstructionAfterFixingOrientationForAsset:inAsset forTrack:myLocalVideoTrack atTime:videoTotalDuration]; 

Sopra c'è il codice per chiamare il metodo citato, in cui inAsset è la vostra risorsa video e videoTotalDuration è la durata totale del video come CMTime. mergeComposition è un oggetto della classe AVMutableComposition.

Spero che questo possa essere d'aiuto.

MODIFICA: non si tratta di alcun metodo o evento di richiamata, è necessario chiamarlo in modo previsto con i parametri richiesti come indicato sopra.

+0

Ho provato questo metodo, ma non viene chiamato durante l'esecuzione. Può per favore approfondire un po' di più? Sarebbe utile. Grazie. :) – Dhruv

+0

Il mio turno è completamente nero. Puoi per favore aiutarmi perché? – itsji10dra

+0

sarebbe bello se potessi dare il codice swift 3 per quello. –

2

Usa i metodi qui di seguito per impostare il corretto orientamento, in base all'orientamento della risorsa video, nell'AVMutableVideoComposition.

// Builds an AVMutableVideoComposition whose render size and single layer
// instruction honour the asset's capture orientation (via preferredTransform),
// so portrait video is not rendered rotated 90°.
-(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset
{
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (!videoTrack) {
        return videoComposition; // No video track: return an empty composition.
    }

    // Swap width/height for portrait so the render canvas matches the upright frame.
    CGSize videoSize = videoTrack.naturalSize;
    BOOL isPortrait_ = [self isVideoPortrait:asset];
    if(isPortrait_) {
        NSLog(@"video is portrait ");
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }
    videoComposition.renderSize = videoSize;

    // Guard against a zero nominalFrameRate (some assets report 0, which would
    // make 1/fps infinite); fall back to 30 fps.
    float fps = videoTrack.nominalFrameRate > 0 ? videoTrack.nominalFrameRate : 30.0f;
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1 / fps, 600);

    // Applying preferredTransform counter-rotates the track into the upright canvas.
    // (The original answer also built an unused local AVMutableComposition here;
    // it never contributed to the returned video composition, so it was removed.)
    AVMutableVideoCompositionLayerInstruction *layerInst;
    layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = @[layerInst];
    videoComposition.instructions = @[inst];
    return videoComposition;
}


// Reports whether the asset's first video track was captured in portrait,
// judged by its preferredTransform: a ±90° rotation matrix means portrait,
// identity or 180° means landscape.
-(BOOL) isVideoPortrait:(AVAsset *)asset
{
    NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([videoTracks count] == 0) {
        return FALSE; // No video track: treat as landscape.
    }

    CGAffineTransform xf = [[videoTracks objectAtIndex:0] preferredTransform];

    // Portrait
    if (xf.a == 0 && xf.b == 1.0 && xf.c == -1.0 && xf.d == 0) {
        return YES;
    }
    // PortraitUpsideDown
    if (xf.a == 0 && xf.b == -1.0 && xf.c == 1.0 && xf.d == 0) {
        return YES;
    }
    // Everything else (LandscapeRight, LandscapeLeft, or an unrecognized
    // transform) is reported as landscape, matching the original behavior.
    return FALSE;
}
13

Ecco un modo un po 'più facile se si vuole semplicemente mantenere la rotazione originale.

// Simplest fix when you only need to keep the original rotation: copy the
// source track's preferredTransform onto the composition track, and the
// export session will honour the capture orientation.
// (Fragment: `asset` and `composition` are assumed to exist in the caller.)

// Grab the source track from AVURLAsset for example.
AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].lastObject;

// Grab the composition video track from AVMutableComposition you already made.
AVMutableCompositionTrack *compositionVideoTrack = [composition tracksWithMediaType:AVMediaTypeVideo].lastObject;

// Apply the original transform. Guarded so nothing happens when either
// asset lacks a video track.
if (assetVideoTrack && compositionVideoTrack) {
    [compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
}

// Export...
+0

Ho perso sei ore su questo! Non posso credere sia così facile Vorrei aver letto questo post prima. – etayluz

+0

se voglio unire più video a uno, allora come posso ottenere l'orientamento originale per ogni video nel mio video finale ?? –

+0

in qualche modo non funziona per me :(https://stackoverflow.com/questions/45127420/avmutablecomposition-orientation-layer-size-are-not-correct –

2

La risposta di Dizy in versione Swift: questo funziona per me.

// Swift (2.x syntax) port of the answer above: copy the source track's
// preferredTransform onto the composition track to preserve orientation.
// NOTE(review): the force-casts (`as!`) already trap if either track is
// missing, so the `.playable` guard below checks playability, not presence —
// TODO confirm that is the intended condition.
var assetVideoTrack = (sourceAsset.tracksWithMediaType(AVMediaTypeVideo)).last as! AVAssetTrack

    var compositionVideoTrack = (composition.tracksWithMediaType(AVMediaTypeVideo)).last as! AVMutableCompositionTrack

    if (assetVideoTrack.playable && compositionVideoTrack.playable) {

     compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform
    }
+1

Come si inizia la composizione? –

1

rapida 2:

 

// Swift 2: generate a correctly-oriented thumbnail for a video stored in the
// Documents directory. The key line is `appliesPreferredTrackTransform = true`,
// which makes AVAssetImageGenerator honour the capture orientation so the
// thumbnail is not rotated.
do {
      let paths = NSSearchPathForDirectoriesInDomains(
       NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
      let documentsDirectory: AnyObject = paths[0]
      // TODO: this hard-coded ".MOV" path will be changed to accommodate dynamic videos.
      let dataPath = documentsDirectory.stringByAppendingPathComponent(videoFileName+".MOV")
      let videoAsset = AVURLAsset(URL: NSURL(fileURLWithPath: dataPath), options: nil)
      let imgGenerator = AVAssetImageGenerator(asset: videoAsset)
      // Honour the track's preferredTransform so the frame comes out upright.
      imgGenerator.appliesPreferredTrackTransform = true
      let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil)
      let uiImage = UIImage(CGImage: cgImage)

      videoThumb.image = uiImage
     } catch let err as NSError {
      print("Error generating thumbnail: \(err)")
     }
+0

imgGenerator.appliesPreferredTrackTransform = true –

+0

superbo .. ha funzionato per me .. – JAck