Attività: Unire l'immagine del flyer nel video del flyer (unione video in background su iOS).
Casi:
- Creare un volantino [aggiungi emoticon/text..etc]
- Crea il video
Case1
- premere il tasto BACK [l'utente andrà alla lista dei volantini sullo schermo]; durante questo passaggio stiamo unendo flyerSnapShoot in flyerVideo, e funziona perfettamente.
- Passando alla Galleria del telefono, stiamo visualizzando il video aggiornato al suo interno.
Case2
- Premere il tasto Home di iPhone, sto facendo le cose stesse come sopra, ma di fronte al seguente errore.
FAIL: Error Domain=AVFoundationErrorDomain Code=-11800 "L'operazione non può essere completata" UserInfo={0x17266d40 NSLocalizedDescription=L'operazione non può essere completata, NSUnderlyingError=0x172b3920 "L'operazione non può essere completata (errore OSStatus -16980)", NSLocalizedFailureReason=Si è verificato un errore sconosciuto (-16980)}
Codice:
/// Re-encodes the video at `src` into `dest`, optionally cropping/scaling it and
/// compositing a still image (the flyer snapshot) on top of the video frames.
///
/// @param src      URL of the source movie to read.
/// @param dest     URL the exported QuickTime movie is written to.
/// @param crop     Output rect; `crop.size` becomes the render size, `crop.origin`
///                 offsets the video when `scale != 1.0`.
/// @param scale    Uniform scale applied to the video track (1.0 = untouched).
/// @param image    Optional overlay image; pass nil for no overlay.
/// @param callback Invoked from the export session's completion handler with the
///                 final AVAssetExportSessionStatus and any error. May be nil.
///
/// NOTE(review): when `image != nil` this uses AVVideoCompositionCoreAnimationTool,
/// which renders through Core Animation on the GPU. GPU access is not permitted
/// while the app is in the background, which is exactly the reported failure
/// (AVFoundationErrorDomain -11800 / OSStatus -16980 on pressing Home). The export
/// must either finish in the foreground or avoid the animation tool in background.
- (void)modifyVideo:(NSURL *)src destination:(NSURL *)dest crop:(CGRect)crop
              scale:(CGFloat)scale overlay:(UIImage *)image
         completion:(void (^)(NSInteger, NSError *))callback {
    // Load the source asset.
    AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:src options:nil];

    // Build an editable composition with one video and one audio track.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    AVMutableCompositionTrack *videoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    // Cap the output at MAX_VIDEO_LENGTH seconds unless the source is shorter.
    CMTime inTime = CMTimeMake(MAX_VIDEO_LENGTH * VIDEOFRAME, VIDEOFRAME);
    if (CMTimeCompare(firstAsset.duration, inTime) < 0) {
        inTime = firstAsset.duration;
    }

    // Copy the video track, keeping its orientation transform.
    // BUGFIX: `transform` was previously uninitialized when the asset had no
    // video track, then read unconditionally below — undefined behavior.
    CGAffineTransform transform = CGAffineTransformIdentity;
    NSError *insertError = nil;
    NSArray *videos = [firstAsset tracksWithMediaType:AVMediaTypeVideo];
    if (videos.count > 0) {
        AVAssetTrack *track = videos.firstObject;
        if (![videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, inTime)
                                 ofTrack:track
                                  atTime:kCMTimeZero
                                   error:&insertError]) {
            // BUGFIX: insert failures were silently ignored (error:nil).
            if (callback) callback(AVAssetExportSessionStatusFailed, insertError);
            return;
        }
        transform = track.preferredTransform;
        videoTrack.preferredTransform = transform;
    }

    // Copy the audio track, if the source has one.
    NSArray *audios = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
    if (audios.count > 0) {
        if (![audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, inTime)
                                 ofTrack:audios.firstObject
                                  atTime:kCMTimeZero
                                   error:&insertError]) {
            if (callback) callback(AVAssetExportSessionStatusFailed, insertError);
            return;
        }
    }

    NSLog(@"Natural size: %.2f x %.2f", videoTrack.naturalSize.width, videoTrack.naturalSize.height);

    // Render everything at the crop size.
    mixComposition.naturalSize = crop.size;
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, VIDEOFRAME);
    videoComposition.renderSize = crop.size;
    videoComposition.renderScale = 1.0;

    // Single pass-through instruction covering the whole output range.
    AVMutableVideoCompositionInstruction *passThroughInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, inTime);

    AVMutableVideoCompositionLayerInstruction *passThroughLayer =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

    // Apply optional scale and crop offset on top of the orientation transform.
    if (scale != 1.0) {
        CGAffineTransform scaleTransform = CGAffineTransformMakeScale(scale, scale);
        CGAffineTransform translateTransform = CGAffineTransformTranslate(CGAffineTransformIdentity,
                                                                          -crop.origin.x,
                                                                          -crop.origin.y);
        transform = CGAffineTransformConcat(transform, scaleTransform);
        transform = CGAffineTransformConcat(transform, translateTransform);
    }
    [passThroughLayer setTransform:transform atTime:kCMTimeZero];
    passThroughInstruction.layerInstructions = @[ passThroughLayer ];
    videoComposition.instructions = @[ passThroughInstruction ];

    // Overlay the still image via Core Animation, if one was supplied.
    if (image != nil) {
        // Parent layer hosts both the rendered video and the image overlay.
        CALayer *parentLayer = [CALayer layer];
        parentLayer.frame = CGRectMake(0, 0, crop.size.width, crop.size.height);

        CALayer *videoLayer = [CALayer layer];
        videoLayer.frame = CGRectMake(0, 0, crop.size.width, crop.size.height);
        [parentLayer addSublayer:videoLayer];

        CALayer *imageLayer = [CALayer layer];
        imageLayer.frame = CGRectMake(0, 0, crop.size.width, crop.size.height);
        imageLayer.contents = (id)image.CGImage;
        imageLayer.masksToBounds = YES;
        [parentLayer addSublayer:imageLayer];

        // WARNING: GPU-backed; fails with -11800/-16980 if the export runs
        // while the app is backgrounded.
        videoComposition.animationTool =
            [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                         inLayer:parentLayer];
    }

    // Export asynchronously to `dest` as a QuickTime movie.
    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = dest;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // BUGFIX: guard against a nil completion block before invoking it.
        if (callback) {
            callback(exportSession.status, exportSession.error);
        }
    }];
}
Io chiamo questa funzione da AppDelegate.m:
/// Keeps the app alive briefly after backgrounding so the in-flight video merge
/// can finish, using a UIBackgroundTask around the work in -goingToBg.
///
/// BUGFIX: the expiration handler and the worker block both ended `bgTask`;
/// if expiration fired while the worker was still running, the task was ended
/// twice (and the second end used an already-invalidated identifier). Ending is
/// now funneled through one idempotent block.
///
/// NOTE(review): background time (~30s–3min) may not be enough for an
/// AVAssetExportSession, and GPU-backed Core Animation compositing is not
/// allowed in the background at all — see the -11800 error in the question.
- (void)applicationDidEnterBackground:(UIApplication *)application
{
    // Idempotent cleanup shared by the expiration handler and the worker.
    void (^endTask)(void) = ^{
        if (bgTask != UIBackgroundTaskInvalid) {
            [application endBackgroundTask:bgTask];
            bgTask = UIBackgroundTaskInvalid;
        }
    };

    bgTask = [application beginBackgroundTaskWithName:@"MyTask" expirationHandler:^{
        // The system is about to suspend us: release the task assertion.
        endTask();
    }];

    // Log AFTER the task begins so the remaining-time figure is meaningful.
    NSLog(@"backgroundTimeRemaining: %f", [[UIApplication sharedApplication] backgroundTimeRemaining]);

    // Run the long-running work off the main thread and return immediately.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self goingToBg];
        endTask();
    });
}
Qui l'utente può invertire i video e riprodurli con audio nel lettore video. Link: https://github.com/mehulparmar4ever/ConvertVideoReverse — NOTA: 1. l'utente può invertire il video originale; 2. può estrarre l'audio dal video; 3. può unire file audio e video e riprodurli. – Mehul
Hmm, in questo momento non sono nella condizione di riscrivere il mio codice: sto cercando una correzione per questo problema, non di sostituirlo con una libreria :) –