合并iPhone应用程序中的两个视频文件 [英] Merge Two Video files in iPhone Application

查看:190
本文介绍了合并iPhone应用程序中的两个视频文件的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

在我的一个应用程序中,我需要在视频中添加一些图像。所以我分两部分切断视频,并从该图像制作一个视频。现在我想结合这三个视频文件,制作一个视频文件。但我不知道将这三个视频结合起来。我在这里看到一些代码。但这对我没有帮助。现在,我希望代码合并所有视频,以便在视频文件中放置当前视图屏幕。介于。



用于中断视频文件

  NSURL * url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@AffagogatoofType:@mp4]]; 
AVURLAsset * anAsset = [[AVURLAsset alloc] initWithURL:url options:nil];




for(int i = 0; i< 2; i ++){
AVAssetExportSession * exportSession = [[AVAssetExportSession alloc]
initWithAsset:anAsset presetName:AVAssetExportPresetLowQuality];
NSString * filePath = nil;
NSUInteger count = 0;
do {
filePath = NSTemporaryDirectory();

NSString * numberString = count> 0? [NSString stringWithFormat:@ - %i,count]:@;
filePath = [filePath stringByAppendingPathComponent:[NSString stringWithFormat:@Output - %@。mov,numberString]];
count ++;
} while([[NSFileManager defaultManager] fileExistsAtPath:filePath]);

exportSession.outputURL = [NSURL fileURLWithPath:filePath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
CMTimeRange范围;
if(i == 0){
CMTime start = CMTimeMakeWithSeconds(0.0,600);
CMTime duration = CMTimeMakeWithSeconds(10.0,600);
range = CMTimeRangeMake(开始,持续时间);
} else {
CMTime start = CMTimeMakeWithSeconds(10.0,600);
range = CMTimeRangeMake(start,anAsset.duration);
}
exportSession.timeRange = range;

[exportSession exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(),^ {
[self exportDidFinish:exportSession Tag:i];
}) ;
}];
}

从图片中获取视频

  CGRect rect = CGRectMake(0,0,320,480); 
view = [[UIView alloc] initWithFrame:rect];

NSArray * paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask,YES);
NSString * documentsDirectory =([路径计数]> 0)? [paths objectAtIndex:0]:nil;
NSString * path = [documentsDirectory stringByAppendingPathComponent:[@video2stringByAppendingString:@。mov]];

CGSize size = self.view.frame.size;


NSMutableDictionary * attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:320] forKey:(NSString *)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:480] forKey:(NSString *)kCVPixelBufferHeightKey];


NSError * error = nil;
AVAssetWriter * videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:& error];


NSParameterAssert(videoWriter);
NSDictionary * videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264,AVVideoCodecKey,
[NSNumber numberWithInt:size.width],AVVideoWidthKey,
[NSNumber numberWithInt:size.height],AVVideoHeightKey,
nil];

AVAssetWriterInput * writerInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];


AVAssetWriterInputPixelBufferAdaptor * adapter = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];


NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];


//开始一个会话:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

//将uiimage转换为CGImage。

xPixel = 0;
yPixel = 250;

buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@1.jpeg] CGImage]];



CVPixelBufferPoolCreatePixelBuffer(NULL,adaptor.pixelBufferPool,& buffer);
[adapter appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

for(int i = 0; i< 2; i ++)
{
if([writerInput isReadyForMoreMediaData])
{
// NSLog( @inside for loop%d,i);

for(int pframetime = 1; pframetime< = 2; pframetime ++)
{

CMTime frameTime = CMTimeMake(pframetime,25);
CMTime lastTime = CMTimeMake(i,1); // i是
以上循环的0到19 CMTime presentTime = CMTimeAdd(lastTime,frameTime);

if(i == 0)
buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@1.jpeg] CGImage]];
else
buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@2.jpeg] CGImage]];
while(![writerInput isReadyForMoreMediaData])
{
[NSThread sleepForTimeInterval:0.05];
}

[adapter appendPixelBuffer:buffer withPresentationTime:presentTime];
i ++;
}
if(buffer)
CVBufferRelease(buffer);
// [NSThread sleepForTimeInterval:0.1];
}
}
[writerInput markAsFinished];
[videoWriter finishWriting];
[videoPathArray addObject:path];

//完成会话:

[videoWriter release];
[writerInput release];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);

对于合并视频文件我尝试使用此代码,但这里没有用处是视频之间的一些空白屏幕

  AVMutableComposition * mixComposition = [AVMutableComposition composition]; 

NSString * documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask,YES)objectAtIndex:0];

NSString * video_inputFilePath1 = [videoPathArray objectAtIndex:1];
NSURL * video_inputFileUrl1 = [NSURL fileURLWithPath:video_inputFilePath1];

NSString * video_inputFilePath2 = [videoPathArray objectAtIndex:0];
NSURL * video_inputFileUrl2 = [NSURL fileURLWithPath:video_inputFilePath2];

NSString * video_inputFilePath3 = [videoPathArray objectAtIndex:2];
NSURL * video_inputFileUrl3 = [NSURL fileURLWithPath:video_inputFilePath3];

NSString * outputFileName = @outputFile.mov;
NSString * outputFilePath = [NSString stringWithFormat:@%@ /%@,documentsDirectoryPath,outputFileName];

NSURL * outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

if([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

CMTime nextClipStartTime = kCMTimeZero;


AVURLAsset * videoAsset1 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl1 options:nil];
AVURLAsset * videoAsset2 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl2 options:nil];
AVURLAsset * videoAsset3 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl3 options:nil];


CMTimeRange video_timeRange1 = CMTimeRangeMake(kCMTimeZero,videoAsset1.duration);
AVMutableCompositionTrack * a_compositionVideoTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack1 insertTimeRange:video_timeRange1 ofTrack:[[videoAsset1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

CMTimeRange video_timeRange3 = CMTimeRangeMake(nextClipStartTime,videoAsset3.duration);

[a_compositionVideoTrack1 insertTimeRange:video_timeRange3 ofTrack:[[videoAsset3 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoAsset1.duration error:nil];

CMTimeRange video_timeRange2 = CMTimeRangeMake(nextClipStartTime,videoAsset1.duration);
[a_compositionVideoTrack1 insertTimeRange:video_timeRange2 ofTrack:[[videoAsset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoAsset1.duration error:nil];



AVAssetExportSession * _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality];
_assetExport.shouldOptimizeForNetworkUse = YES;
_assetExport.outputFileType = @com.apple.quicktime-movie;
_assetExport.outputURL = outputFileUrl;


解决方案

尝试观看名为在AV中使用媒体的视频基金会在苹果开发者门户网站。它告诉你如何做你正在描述的事情。



https://developer.apple.com/videos/wwdc/2011/


In One of my application i need to add some image in video. so i cut break video in two part and also make one video from that image. now i want to combine this three video file and make one video file. but i am not get any idea to combine this three video. i see some code over here. but that is not helpful to me. for break video and for make video from image i used below code now i want code to merge this all video.

Any other idea for put current view screen in video file in between.

For break video file

NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Affagogato" ofType:@"mp4"]];
 AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:url options:nil];




for(int i = 0; i < 2; i++) {
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
                                           initWithAsset:anAsset presetName:AVAssetExportPresetLowQuality];
    NSString *filePath = nil;
    NSUInteger count = 0;
    do {
        filePath = NSTemporaryDirectory();

        NSString *numberString = count > 0 ? [NSString stringWithFormat:@"-%i", count] : @"";
        filePath = [filePath stringByAppendingPathComponent:[NSString stringWithFormat:@"Output-%@.mov", numberString]];
        count++;
    } while([[NSFileManager defaultManager] fileExistsAtPath:filePath]);      

    exportSession.outputURL = [NSURL fileURLWithPath:filePath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    CMTimeRange range;
    if(i == 0){
        CMTime start = CMTimeMakeWithSeconds(0.0, 600);
        CMTime duration = CMTimeMakeWithSeconds(10.0, 600);
        range = CMTimeRangeMake(start, duration);
    }else{
        CMTime start = CMTimeMakeWithSeconds(10.0, 600);
        range = CMTimeRangeMake(start, anAsset.duration);
    }
    exportSession.timeRange = range;   

    [exportSession exportAsynchronouslyWithCompletionHandler:^
     {
         dispatch_async(dispatch_get_main_queue(), ^{
             [self exportDidFinish:exportSession Tag:i];
         });
     }]; 
}

Get video from Images

CGRect rect=CGRectMake(0, 0, 320, 480);
view = [[UIView alloc]initWithFrame:rect];

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
NSString *path = [documentsDirectory stringByAppendingPathComponent:[@"video2"  stringByAppendingString:@".mov"]];

CGSize size = self.view.frame.size;


NSMutableDictionary *attributes = [[NSMutableDictionary alloc]init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:320] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:480] forKey:(NSString*)kCVPixelBufferHeightKey];


NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                              [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];


NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                               nil];

AVAssetWriterInput* writerInput = [[AVAssetWriterInput
                                    assetWriterInputWithMediaType:AVMediaTypeVideo
                                    outputSettings:videoSettings] retain];


AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                 sourcePixelBufferAttributes:nil];


NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];


//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

//convert uiimage to CGImage.

xPixel=0;
yPixel=250;

buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]];



CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &buffer);
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

for (int i = 0;i<2; i++)
{
    if([writerInput isReadyForMoreMediaData])
    {
        //NSLog(@"inside for loop %d",i);

        for(int pframetime=1;pframetime<=2;pframetime++)
        {  

            CMTime frameTime = CMTimeMake(pframetime,25);
            CMTime lastTime=CMTimeMake(i,1); //i is from 0 to 19 of the loop above
            CMTime presentTime=CMTimeAdd(lastTime, frameTime);

            if(i==0)
                buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]];
            else
                buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"2.jpeg"] CGImage]];
            while ( ![writerInput isReadyForMoreMediaData] )
            {
                [NSThread sleepForTimeInterval:0.05];
            }

            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            i++;
        }
        if(buffer)
            CVBufferRelease(buffer);
        //[NSThread sleepForTimeInterval:0.1];
    }
}
[writerInput markAsFinished];
[videoWriter finishWriting];
[videoPathArray addObject:path];

//Finish the session:

[videoWriter release];
[writerInput release];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);

For Merge video files i try this code but not useful here is some blank screen between video

   AVMutableComposition* mixComposition = [AVMutableComposition composition];

    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];  

    NSString* video_inputFilePath1 = [videoPathArray objectAtIndex:1];
    NSURL*    video_inputFileUrl1 = [NSURL fileURLWithPath:video_inputFilePath1];

    NSString* video_inputFilePath2 = [videoPathArray objectAtIndex:0];
    NSURL*    video_inputFileUrl2 = [NSURL fileURLWithPath:video_inputFilePath2];

    NSString* video_inputFilePath3 = [videoPathArray objectAtIndex:2];
    NSURL*    video_inputFileUrl3 = [NSURL fileURLWithPath:video_inputFilePath3];

    NSString* outputFileName = @"outputFile.mov";
    NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName];

    NSURL*    outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) 
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;


    AVURLAsset* videoAsset1 = [[AVURLAsset alloc]initWithURL:video_inputFileUrl1 options:nil];
    AVURLAsset* videoAsset2 = [[AVURLAsset alloc]initWithURL:video_inputFileUrl2 options:nil];
    AVURLAsset* videoAsset3 = [[AVURLAsset alloc]initWithURL:video_inputFileUrl3 options:nil];


    CMTimeRange video_timeRange1 = CMTimeRangeMake(kCMTimeZero,videoAsset1.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack1 insertTimeRange:video_timeRange1 ofTrack:[[videoAsset1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    CMTimeRange video_timeRange3 = CMTimeRangeMake(nextClipStartTime,videoAsset3.duration);

    [a_compositionVideoTrack1 insertTimeRange:video_timeRange3 ofTrack:[[videoAsset3 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoAsset1.duration error:nil];

    CMTimeRange video_timeRange2 = CMTimeRangeMake(nextClipStartTime,videoAsset1.duration);
    [a_compositionVideoTrack1 insertTimeRange:video_timeRange2 ofTrack:[[videoAsset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoAsset1.duration error:nil];



    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality];   
    _assetExport.shouldOptimizeForNetworkUse = YES;
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;

解决方案

Try watching the video called "Working with Media in AV Foundation" in the apple developers portal. It tells you how to do what you are describing.

https://developer.apple.com/videos/wwdc/2011/

这篇关于合并iPhone应用程序中的两个视频文件的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆