Try this code:
// Requires #import <AVFoundation/AVFoundation.h> (and UIKit for UIImageOrientation).
- (void)convertVideo:(NSString *)videoPath {
    // videoPath is expected to be a local file-system path, so build a file URL.
    AVURLAsset *firstAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // VIDEO TRACK
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

    AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

    // AUDIO TRACK (the asset is assumed to contain at least one audio track).
    CMTime audioTime = kCMTimeZero; // insert the audio at the start, in sync with the video
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [audioSession setActive:YES error:nil];

    if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 7.0) {
        if (firstAsset != nil) {
            AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        }
    } else {
        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:audioTime error:nil];
    }
    // FIXING ORIENTATION //
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
    AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
    BOOL isFirstAssetPortrait_ = NO;
    CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
    if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
        FirstAssetOrientation_ = UIImageOrientationRight;
        isFirstAssetPortrait_ = YES;
    }
    if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
        FirstAssetOrientation_ = UIImageOrientationLeft;
        isFirstAssetPortrait_ = YES;
    }
    if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
        FirstAssetOrientation_ = UIImageOrientationUp;
    }
    if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
        FirstAssetOrientation_ = UIImageOrientationDown;
    }
    [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
    CGFloat FirstAssetScaleToFitRatio = 0;
    MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, nil];

    AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1, 30);
    CGSize naturalSizeFirst;
    if (isFirstAssetPortrait_) {
        FirstAssetScaleToFitRatio = FirstAssetTrack.naturalSize.width / FirstAssetTrack.naturalSize.height;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
        naturalSizeFirst = CGSizeMake(FirstAssetTrack.naturalSize.height, FirstAssetTrack.naturalSize.width);
    } else {
        naturalSizeFirst = FirstAssetTrack.naturalSize;
    }
    MainCompositionInst.renderSize = CGSizeMake(naturalSizeFirst.width, naturalSizeFirst.height);
    NSString *tmpDirectory = NSTemporaryDirectory();
    NSString *fname = [NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000];
    NSString *tmpFile = [tmpDirectory stringByAppendingPathComponent:fname];
    NSURL *url = [NSURL fileURLWithPath:tmpFile];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.videoComposition = MainCompositionInst;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exporter.status) {
            // The completion handler runs on a background queue, so hop to the
            // main queue before touching UI state.
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.progressStatus setText:@"Converted..."];
                selectedVideo = selectedVideo + 1;
            });
        } else if (AVAssetExportSessionStatusFailed == exporter.status) {
            // A failure may happen because of an event out of your control,
            // for example an interruption like an incoming phone call,
            // so make sure to handle this case appropriately.
            //ALog(@"AVAssetExportSessionStatusFailed %@", exporter.error);
        } else {
            //ALog(@"Export Session Status: %d", exporter.status);
        }
    }];
}
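For reference, a minimal call site might look like the sketch below. The input file name and temp-directory location are only assumptions for illustration; the method expects a local file-system path, and progressStatus / selectedVideo are members the calling class is assumed to declare already.

// Minimal usage sketch (assumed context: a view controller that declares the
// progressStatus label and the selectedVideo counter used in the completion
// handler above; "input.mov" is a hypothetical file in the temp directory).
NSString *pickedVideoPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"input.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:pickedVideoPath]) {
    [self convertVideo:pickedVideoPath];
}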
Source
2015-01-06 11:21:15
A few more details, please. Some information about the assets: are you merging assets or something along those lines? – Maverick
Is this issue specific to the Retina iPad? – uchiha
To add to @Maverick's question, are the assets created programmatically? If so, do they come from your app, or are you importing them from somewhere else? –