The figure above illustrates the mixing process; the code follows:

- (void)mergeAndExportVideos:(NSArray *)videosPathArray withOutPath:(NSString *)outpath {
    if (videosPathArray.count == 0) {
        return;
    }
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime totalDuration = kCMTimeZero;
    for (int i = 0; i < videosPathArray.count; i++) {
        AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videosPathArray[i]]];
        NSError *erroraudio = nil;
        // Get the audio track of this asset
        AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        // Insert the audio into the composition's audio track, appended at totalDuration
        BOOL ba = [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                      ofTrack:assetAudioTrack
                                       atTime:totalDuration
                                        error:&erroraudio];
        NSLog(@"erroraudio: %@ %d", erroraudio, ba);
        NSError *errorVideo = nil;
        // Get the video track of this asset and insert it the same way
        AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        BOOL bl = [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                      ofTrack:assetVideoTrack
                                       atTime:totalDuration
                                        error:&errorVideo];
        NSLog(@"errorVideo: %@ %d", errorVideo, bl);
        totalDuration = CMTimeAdd(totalDuration, asset.duration);
    }
    NSLog(@"%@", NSHomeDirectory());
    NSURL *mergeFileURL = [NSURL fileURLWithPath:outpath];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPreset640x480];
    exporter.outputURL = mergeFileURL;
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"exporter error: %@", exporter.error);
    }];
}
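
A small usage sketch (the input file paths here are hypothetical placeholders): merge two recordings into one MP4 in the temporary directory.

    NSArray *clips = @[@"/path/to/a.mp4", @"/path/to/b.mp4"]; // hypothetical input files
    NSString *outPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"merged.mp4"];
    [self mergeAndExportVideos:clips withOutPath:outPath];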

Here is a breakdown of the methods used above:

- (AVMutableCompositionTrack *)addMutableTrackWithMediaType:(NSString *)mediaType preferredTrackID:(CMPersistentTrackID)preferredTrackID;

Parameters:

mediaType: the media type of the track to add; AVMediaTypeVideo for video, AVMediaTypeAudio for audio.

preferredTrackID: can be set freely, but kCMPersistentTrackID_Invalid (i.e. 0) is recommended, in which case the framework assigns a unique track ID automatically.
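
A minimal sketch of creating a track this way (the composition is created on the spot just for illustration): passing kCMPersistentTrackID_Invalid lets the framework pick a unique ID, which can then be read back from the returned track.

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *newTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    NSLog(@"assigned trackID: %d", newTrack.trackID);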

// Insert audio or video into a composition track
- (BOOL)insertTimeRange:(CMTimeRange)timeRange ofTrack:(AVAssetTrack *)track atTime:(CMTime)startTime error:(NSError * __nullable * __nullable)outError;

Parameters:

timeRange: the time range of the source audio/video to insert.

track: the source audio/video track to insert from.
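
A minimal sketch of the insert call, reusing the newTrack from the sketch above (the clip path is a hypothetical placeholder): it inserts only the first two seconds of the clip at the five-second mark of the composition track, which is where the timeRange and atTime parameters differ from the full-duration inserts in the merge code.

    AVURLAsset *clip = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:@"/path/to/clip.mp4"]]; // hypothetical path
    AVAssetTrack *clipVideoTrack = [[clip tracksWithMediaType:AVMediaTypeVideo] firstObject];
    NSError *error = nil;
    BOOL ok = [newTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(2, 1))
                                ofTrack:clipVideoTrack
                                 atTime:CMTimeMake(5, 1)
                                  error:&error];
    if (!ok) {
        NSLog(@"insert failed: %@", error);
    }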

As a supplement, here is a method for correcting video orientation. Personally I don't think it's great, so suggestions are welcome. It came from Stack Overflow, but I've forgotten the exact URL.

The code is as follows:

- (void)startExportVideoWithVideoAsset:(AVURLAsset *)videoAsset completion:(void (^)(NSString *outputPath))completion {

    AVAssetExportSession *session = [[AVAssetExportSession alloc]initWithAsset:videoAsset presetName:AVAssetExportPresetHighestQuality];
    // Build a video composition that corrects the video orientation
    AVMutableVideoComposition *videoComposition = [self fixedCompositionWithAsset:videoAsset];
    if (videoComposition.renderSize.width) {
        // Apply the orientation fix only when a rotation was actually detected
        session.videoComposition = videoComposition;
    }
    // Export to a timestamped .mp4 under tmp/
    NSDateFormatter *formater = [[NSDateFormatter alloc] init];
    [formater setDateFormat:@"yyyy-MM-dd-HH:mm:ss"];
    NSString *outputPath = [NSHomeDirectory() stringByAppendingFormat:@"/tmp/output-%@.mp4", [formater stringFromDate:[NSDate date]]];
    session.outputURL = [NSURL fileURLWithPath:outputPath];
    
    // Optimize for network use.
    session.shouldOptimizeForNetworkUse = true;
    
    NSArray *supportedTypeArray = session.supportedFileTypes;
    if ([supportedTypeArray containsObject:AVFileTypeMPEG4]) {
        session.outputFileType = AVFileTypeMPEG4;
    } else if (supportedTypeArray.count == 0) {
        NSLog(@"No supported file types 视频类型暂不支持导出");
        return;
    } else {
        session.outputFileType = [supportedTypeArray firstObject];
    }
    
    if (![[NSFileManager defaultManager] fileExistsAtPath:[NSHomeDirectory() stringByAppendingFormat:@"/tmp"]]) {
        [[NSFileManager defaultManager] createDirectoryAtPath:[NSHomeDirectory() stringByAppendingFormat:@"/tmp"] withIntermediateDirectories:YES attributes:nil error:nil];
    }
    // Begin to export video to the output path asynchronously.
    [session exportAsynchronouslyWithCompletionHandler:^(void) {
        NSLog(@"%@",[session.error description]);
        switch (session.status) {
            case AVAssetExportSessionStatusUnknown:
                NSLog(@"AVAssetExportSessionStatusUnknown"); break;
            case AVAssetExportSessionStatusWaiting:
                NSLog(@"AVAssetExportSessionStatusWaiting"); break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"AVAssetExportSessionStatusExporting"); break;
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"AVAssetExportSessionStatusCompleted");
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (completion) {
                        completion(outputPath);
                    }
                });
            }  break;
            case AVAssetExportSessionStatusFailed:{
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (completion) {
                        completion(nil);
                    }
                });
                NSLog(@"AVAssetExportSessionStatusFailed"); break;
            }
            default: break;
        }
    }];
}

/// Build a video composition that corrects the video orientation
- (AVMutableVideoComposition *)fixedCompositionWithAsset:(AVAsset *)videoAsset {
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    // Video rotation in degrees
    int degrees = [self degressFromVideoFileWithAsset:videoAsset];
    if (degrees != 0) {
        CGAffineTransform translateToCenter;
        CGAffineTransform mixedTransform;
        videoComposition.frameDuration = CMTimeMake(1, 30);
        
        NSArray *tracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        
        if (degrees == 90) {
            // Rotate 90° clockwise
            translateToCenter = CGAffineTransformMakeTranslation(videoTrack.naturalSize.height, 0.0);
            mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI_2);
            videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.height,videoTrack.naturalSize.width);
        } else if(degrees == 180){
            // Rotate 180° clockwise
            translateToCenter = CGAffineTransformMakeTranslation(videoTrack.naturalSize.width, videoTrack.naturalSize.height);
            mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI);
            videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.width,videoTrack.naturalSize.height);
        } else if(degrees == 270){
            // Rotate 270° clockwise
            translateToCenter = CGAffineTransformMakeTranslation(0.0, videoTrack.naturalSize.width);
            mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI_2*3.0);
            videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.height,videoTrack.naturalSize.width);
        }
        
        AVMutableVideoCompositionInstruction *roateInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        roateInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, [videoAsset duration]);
        AVMutableVideoCompositionLayerInstruction *roateLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
        
        [roateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
        
        roateInstruction.layerInstructions = @[roateLayerInstruction];
        // Attach the orientation instruction to the composition
        videoComposition.instructions = @[roateInstruction];
    }
    return videoComposition;
}
/// Get the video rotation in degrees
- (int)degressFromVideoFileWithAsset:(AVAsset *)asset {
    int degress = 0;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0){
            // Portrait
            degress = 90;
        } else if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0){
            // PortraitUpsideDown
            degress = 270;
        } else if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0){
            // LandscapeRight
            degress = 0;
        } else if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0){
            // LandscapeLeft
            degress = 180;
        }
    }
    return degress;
}
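
One possible suggestion, offered as a sketch (this is not from the Stack Overflow answer referenced above): derive the rotation from preferredTransform with atan2 instead of matching the four matrix entries exactly, which also tolerates transforms whose entries are not exactly 0 or ±1. The method name below is made up for illustration.

    // Sketch of an alternative: compute the rotation angle from preferredTransform.
    // Assumes AVFoundation is imported, as in the methods above.
    - (int)rotationDegreesForAsset:(AVAsset *)asset {
        AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        if (!track) {
            return 0;
        }
        CGAffineTransform t = track.preferredTransform;
        // atan2 of the first matrix column (b over a) gives the rotation in radians
        int degrees = (int)lround(atan2(t.b, t.a) * 180.0 / M_PI);
        if (degrees < 0) {
            degrees += 360; // normalize to 0 / 90 / 180 / 270
        }
        return degrees;
    }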
