//
// CCViewController.m
// VideoEditDemo
//
// Created by mr.cao on 14-6-13.
// Copyright (c) 2014 mrcao. All rights reserved.
// Notes on CMTimeMake and CMTimeMakeWithSeconds
/* CMTimeMake(a, b): a is the current frame value, b is how many frames make up one second;
 the current playback time in seconds is a / b.
 CMTimeMakeWithSeconds(a, b): a is the current time in seconds, b is the frames per second (timescale).

 CMTimeMake
 CMTime CMTimeMake (
 int64_t value,
 int32_t timescale
 );

 As its name suggests, CMTimeMake builds a CMTime, but don't mistake it for a
 general-purpose clock type: CMTime is specifically meant to represent media (video)
 time. Usage: CMTimeMake(time, timeScale). "time" is not seconds; converting it to
 seconds depends on the second parameter, timeScale, which is how many units make up
 one second (you can think of it as the fps). The time actually represented is
 therefore time / timeScale seconds.

 A simple example (see the illustrative helper after the imports below):
 CMTimeMake(60, 30);
 CMTimeMake(30, 15);
 Both represent 2 seconds of media time, but the playback frame rates differ by a factor of two. */
#import "CCViewController.h"
#import "CommonHelper.h"
@implementation CCViewController
- (void)viewDidUnload
{
[self setJsVideoScrubber:nil];
[super viewDidUnload];
}
- (void) viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
[self removeAllFiles];
}
- (void)viewDidLoad
{
[super viewDidLoad];
originViedoWidth=0;
originViedoHeight=0;
originDuration=0;
originViedoFPS=0;
self.view.backgroundColor=[UIColor darkGrayColor];
self.jsVideoScrubber=[[JSVideoScrubber alloc]initWithFrame:CGRectMake(0, 44,ViewWidth,50)];
[self.view addSubview:self.jsVideoScrubber];
self.duration=[[UILabel alloc]initWithFrame:CGRectMake(ViewWidth-32, self.jsVideoScrubber.frame.origin.y+self.jsVideoScrubber.frame.size.height, 30, 20)];
self.duration.textAlignment=NSTextAlignmentRight;
self.duration.font=[UIFont fontWithName:@"Helvetica-Bold" size:10];
//self.duration.backgroundColor=[UIColor greenColor];
self.duration.textColor=[UIColor blueColor];
self.duration.text = [NSString stringWithFormat:@"%02d:%02d", 0, 0];
[self.view addSubview:self.duration];
self.offset=[[UILabel alloc]initWithFrame:CGRectMake(0, self.jsVideoScrubber.frame.origin.y+self.jsVideoScrubber.frame.size.height, 30, 20)];
self.offset.textAlignment=NSTextAlignmentLeft;
self.offset.font=[UIFont fontWithName:@"Helvetica-Bold" size:10];
// self.offset.backgroundColor=[UIColor greenColor];
self.offset.textColor=[UIColor blueColor];
self.offset.text= [NSString stringWithFormat:@"%02d:%02d", 0, 0];
[self.view addSubview:self.offset];
self.endoffset=[[UILabel alloc]initWithFrame:CGRectMake(ViewHeight-32, self.jsVideoScrubber.frame.origin.y+self.jsVideoScrubber.frame.size.height, 30, 20)];
self.endoffset.textAlignment=NSTextAlignmentLeft;
self.endoffset.font=[UIFont fontWithName:@"Helvetica-Bold" size:10];
// self.endoffset.backgroundColor=[UIColor greenColor];
self.endoffset.textColor=[UIColor blueColor];
self.endoffset.text= [NSString stringWithFormat:@"%02d:%02d", 0, 0];
[self.view addSubview:self.endoffset];
_viedoEditButton = [UIButton buttonWithType:UIButtonTypeCustom];
_viedoEditButton.frame=CGRectMake(ViewWidth/2-256*0.2, [UIScreen mainScreen].bounds.size.height/2-100, 256*0.4, 256*0.4);
//_viedoEditButton.backgroundColor=[UIColor grayColor];
//[_viedoEditButton setImage:[UIImage imageNamed:@"play.png"] forState:UIControlStateNormal];
[_viedoEditButton addTarget:self action:@selector(testCompressionSession) forControlEvents:UIControlEventTouchUpInside];
_viedoEditButton.layer.cornerRadius=5;
_viedoEditButton.layer.borderWidth=5;
[_viedoEditButton setTitle:@"开始剪辑" forState:UIControlStateNormal]; // "Start trimming"
_viedoEditButton.titleLabel.font=[UIFont fontWithName:@"Helvetica-Bold" size:16];
[self.view addSubview:_viedoEditButton];
_viedoaddMusicButton = [UIButton buttonWithType:UIButtonTypeCustom];
_viedoaddMusicButton.frame=CGRectMake(ViewWidth/2-256*0.2, [UIScreen mainScreen].bounds.size.height/2+20, 256*0.4, 256*0.4);
// _viedoaddMusicButton.backgroundColor=[UIColor grayColor];
//[_viedoEditButton setImage:[UIImage imageNamed:@"play.png"] forState:UIControlStateNormal];
[_viedoaddMusicButton addTarget:self action:@selector(addMusicToViedo:) forControlEvents:UIControlEventTouchUpInside];
_viedoaddMusicButton.layer.cornerRadius=5;
_viedoaddMusicButton.layer.borderWidth=5;
[_viedoaddMusicButton setTitle:@"添加背景音乐" forState:UIControlStateNormal]; // "Add background music"
_viedoaddMusicButton.titleLabel.font=[UIFont fontWithName:@"Helvetica-Bold" size:16];
[self.view addSubview:_viedoaddMusicButton];
_activityIndicatorView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
//Create a UIActivityIndicatorView (_activityIndicatorView) and initialize its style.
_activityIndicatorView.frame = CGRectMake(ViewWidth/2, ViewHeight/2,0, 0);
_activityIndicatorView.color = [UIColor redColor];
//_activityIndicatorView.hidesWhenStopped = NO;
[self.view addSubview:_activityIndicatorView];
AVURLAsset* asset = nil;
NSString *filePath = [[NSBundle mainBundle] pathForResource:@"Green" ofType:@"mov"];
NSURL* url = [NSURL fileURLWithPath:filePath];
asset = [AVURLAsset URLAssetWithURL:url options:nil];
__weak CCViewController *ref = self;
NSArray *keys = [NSArray arrayWithObjects:@"tracks", @"duration", nil];
[asset loadValuesAsynchronouslyForKeys:keys completionHandler:^(void) {
// The completion handler may be called on an arbitrary queue, so hop to the main queue before touching UIKit.
dispatch_async(dispatch_get_main_queue(), ^{
[ref.jsVideoScrubber setupControlWithAVAsset:asset];
double total = CMTimeGetSeconds(ref.jsVideoScrubber.duration);
int min = (int)total / 60;
int seconds = (int)total % 60;
ref.duration.text = [NSString stringWithFormat:@"%02d:%02d", min, seconds];
[ref updateOffsetLabel:ref.jsVideoScrubber];
[ref.jsVideoScrubber addTarget:ref action:@selector(updateOffsetLabel:) forControlEvents:UIControlEventValueChanged];
// ref.currentSelection = indexPath;
});
}];
[self extractFrames];
}
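// Target for the scrubber's UIControlEventValueChanged: formats the current offset
// (in seconds) as mm:ss and repositions the offset label to track the scrubber position.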
- (void) updateOffsetLabel:(JSVideoScrubber *) scrubber
{
NSLog(@"%f",self.jsVideoScrubber.offset);
int min = (int)self.jsVideoScrubber.offset / 60;
int seconds = (int)self.jsVideoScrubber.offset % 60;
CGFloat offsetx=(self.jsVideoScrubber.offset-2)/CMTimeGetSeconds(self.jsVideoScrubber.duration)*ViewWidth;
self.offset.text = [NSString stringWithFormat:@"%02d:%02d", min, seconds];
self.offset.frame=CGRectMake(offsetx, 94, 30, 20);
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
}
-(IBAction)addMusicToViedo:(id)sender
{
_activityIndicatorView.hidesWhenStopped = NO;
[_activityIndicatorView startAnimating];
[self CompileFilesToMakeMovie:nil withMovie:nil withAudio:nil];
}
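// Draws the given CGImage into a newly created 32ARGB CVPixelBuffer of the requested size.
// The buffer is returned +1 retained, so the caller is responsible for releasing it.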
- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options, &pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
// Use the pixel buffer's actual bytes-per-row rather than 4 * width, since rows may be padded.
CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace, kCGImageAlphaPremultipliedFirst);
NSParameterAssert(context);
/*CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);*/
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
- (IBAction)testCompressionSession
{
_activityIndicatorView.hidesWhenStopped = NO;
[_activityIndicatorView startAnimating];
//self.hud=[[CommonHelper sharedInstance]showHud:self title:@"视频剪辑中..." selector:@selector(reloadMainUIInThread) arg:nil targetView:self.view];
NSString *docDir = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
docDir= [NSString stringWithFormat:@"%@/%@%@",docDir,@"video",@".mp4"];
//CGSize size = CGSizeMake(640,480);//define the output video size
CGSize size=CGSizeMake(originViedoWidth, originViedoHeight);
[self writeMovieAtPath:docDir withSize:size inDuration:originDuration byFPS:originViedoFPS withStartTime:10 withEndTime:30];
}
//Compose a movie from the extracted frames between the given start and end times
- (void) writeMovieAtPath:(NSString *) path withSize:(CGSize) size
inDuration:(float)duration byFPS:(int32_t)fps withStartTime:(NSTimeInterval)starttime withEndTime:(NSTimeInterval)endtime
{
//int __block frameCount = 0;
NSError *error = nil;
//---- initialize compression engine; supported container types: AVFileTypeQuickTimeMovie, AVFileTypeMPEG4, AVFileTypeAMR
AVAssetWriter __block *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
if(error)
NSLog(@"error = %@", [error localizedDescription]);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
if ([videoWriter canAddInput:writerInput])
NSLog(@"can add video writer input");
else
NSLog(@"cannot add video writer input");
[videoWriter addInput:writerInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
int __block frame = 0;
// Map the requested start/end times (in seconds) to indices into the extracted frame array.
int start=[_imageArry count]/duration *starttime;
int end=[_imageArry count]/duration *endtime;
NSMutableArray *newArray=[[NSMutableArray alloc]initWithCapacity:2];
for(int i=start;i<end;i++)
{
[newArray addObject:[_imageArry objectAtIndex:i]];
}
//int imagesCount = [_imageArry count];
//float averageTime = duration/imagesCount;
//int averageFrame = (int)(averageTime * fps);
//NSLog(@"newcount:%d",newArray.count);
__weak CCViewController *ref=self;
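// AVAssetWriterInput pulls data: the block below runs on dispatchQueue whenever the input
// can accept more media, appending one pixel buffer per selected frame and marking the
// input finished once every frame in newArray has been consumed.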
[writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
while ([writerInput isReadyForMoreMediaData])
{
if(++frame >= [newArray count])
{
[writerInput markAsFinished];
//[videoWriter finishWriting];
[videoWriter finishWritingWithCompletionHandler:^{
videoWriter=nil;
// The completion handler may run on a background queue; hop to the main queue for UI work.
dispatch_async(dispatch_get_main_queue(), ^{
[ref.activityIndicatorView stopAnimating];
ref.activityIndicatorView.hidesWhenStopped = YES;
/*UIAlertView *recorderSuccessful = [[UIAlertView alloc] initWithTitle:@"" message:@"Video written successfully"
delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
[recorderSuccessful show];*/
});
}];
break;
}
CVPixelBufferRef buffer = NULL;
int idx = frame;
if(idx<[newArray count])
{
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[[newArray objectAtIndex:idx] CGImage] size:size];
if (buffer)
{
//CMTime frameTime = CMTimeMake(frameCount,(int32_t) fps);
//float frameSeconds = CMTimeGetSeconds(frameTime);
// NSLog(@"frameCount:%d,kRecordingFPS:%d,frameSeconds:%f,%d",frameCount,fps,frameSeconds,frame);
// if(![adaptor appendPixelBuffer:buffer withPresentationTime:frameTime])
if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame*fps, [_imageArry count]/duration*fps)])
NSLog(@"FAIL");
else
NSLog(@"sucess");
}
CFRelease(buffer);
}
else
{
break;
}
//frameCount = frameCount + averageFrame;
}
}];
}
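// Uses AVAssetImageGenerator to walk the bundled "Green.mov", saving each extracted frame
// as a JPEG in the Documents directory and collecting the images in _imageArry, while
// recording the source video's width, height, duration and timescale for later re-encoding.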
- (void)extractFrames
{
NSString *filePath = [[NSBundle mainBundle] pathForResource:@"Green" ofType:@"mov"];
AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
//setting up generator & compositor
self.generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
// self.imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:myAsset];
self.generator.requestedTimeToleranceBefore = kCMTimeZero;
self.generator.requestedTimeToleranceAfter = kCMTimeZero;
_generator.appliesPreferredTrackTransform = YES;
self.composition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:asset];
NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
NSTimeInterval frameDuration = CMTimeGetSeconds(_composition.frameDuration);
CGFloat totalFrames = round(duration/frameDuration);
//[lblFrames setText:[NSString stringWithFormat:@"%.2f Frames",totalFrames]];
//[lblVideoLength setText:[NSString stringWithFormat:@"Video Duration : %f",duration]];
NSMutableArray * times = [[NSMutableArray alloc] init];
_imageArry=[[NSMutableArray alloc]initWithCapacity:2];
NSLog(@"timescale:%d",_composition.frameDuration.timescale);
originViedoFPS=_composition.frameDuration.timescale;//frames per second of the source video
originDuration=duration;
// *** Fetch First 200 frames only test ok ***
/*for (int i=0; i<1528; i+=5) {
NSValue * time = [NSValue valueWithCMTime:CMTimeMakeWithSeconds(i*frameDuration, composition.frameDuration.timescale)];
[times addObject:time];
}*/
for (int i=0; i<(int)totalFrames; i+=1) {
NSValue * time = [NSValue valueWithCMTime:CMTimeMakeWithSeconds(i*frameDuration*3, _composition.frameDuration.timescale*3)];
[times addObject:time];
}
__block NSInteger count = 0;
AVAssetImageGeneratorCompletionHandler handler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error){
// If actualTime is not equal to requestedTime, the image is ignored
if(CMTimeCompare(actualTime, requestedTime) == 0)
{
if (result == AVAssetImageGeneratorSucceeded) {
// NSLog(@"%.02f %.02f", CMTimeGetSeconds(requestedTime), CMTimeGetSeconds(actualTime));
// Each log has a different actualTime.
// frame extraction is here...
NSString *docDir = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
NSString *filePath = [docDir stringByAppendingPathComponent:[NSString stringWithFormat:@"%f.jpg",CMTimeGetSeconds(requestedTime)]];
[UIImageJPEGRepresentation([UIImage imageWithCGImage:im], 0.8f) writeToFile:filePath atomically:YES];
//[_imageArry addObject:[UIImage imageWithContentsOfFile:filePath]];
//Since we only have a file path, load the file into an NSData object
NSData *data=[NSData dataWithContentsOfFile:filePath];
//then decode the image from that data
UIImage *img=[UIImage imageWithData:data];
[_imageArry addObject:img];
count++;
NSLog(@"filepath:%@,%d,%zu,%zu",filePath,count,CGImageGetWidth(im),CGImageGetHeight(im));
originViedoWidth=CGImageGetWidth(im);
originViedoHeight=CGImageGetHeight(im);
//[self performSelector:@selector(updateStatusWithFrame:) onThread:[NSThread mainThread] withObject:[NSString stringWithFormat:@"%d Processing %d of %.0f",count,count,totalFrames] waitUntilDone:NO];
}
else if(result == AVAssetImageGeneratorFailed)
{
//[lblFileStatus setText:@"Failed to Extract"];
}
else if(result == AVAssetImageGeneratorCancelled)
{
//[lblFileStatus setText:@"Process Cancelled"];
}
}
};
_generator.requestedTimeToleranceBefore = kCMTimeZero;
_generator.requestedTimeToleranceAfter = kCMTimeZero;
[_generator generateCGImagesAsynchronouslyForTimes:times completionHandler:handler];
}
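// Clears out the Documents directory so frames and exports left over from a previous run
// don't accumulate; invoked from viewWillAppear:.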
- (void)removeAllFiles
{
NSFileManager *fm = [NSFileManager defaultManager];
NSString *directory = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
NSError *error = nil;
for (NSString *file in [fm contentsOfDirectoryAtPath:directory error:&error]) {
// NSLog(@"file:%@",[NSString stringWithFormat:@"%@/%@", directory, file]);
BOOL success = [fm removeItemAtPath:[NSString stringWithFormat:@"%@/%@", directory, file] error:&error];
if (!success) {
NSLog(@"failed to remove %@: %@", file, error);
}
}
}
//Add background music to the video
-(void)CompileFilesToMakeMovie:(NSString *)toPath withMovie:(NSString *)moviePath withAudio:(NSString *)audioPath
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString* audio_inputFileName = @"sound.caf";
NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@", [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"], audio_inputFileName] ;
NSString *filePath = [[NSBundle mainBundle] pathForResource:@"布谷鸟" ofType:@"caf"]; // "cuckoo" sound bundled with the app
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:filePath];
NSString* video_inputFileName = @"video.mov";
NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@", [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"], video_inputFileName] ;
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputVeido.mov";
NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@", [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"], outputFileName] ;
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
__weak CCViewController *ref = self;
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie; // @"com.apple.quicktime-movie"
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
// The export completion handler runs on a background queue; update UI on the main queue.
dispatch_async(dispatch_get_main_queue(), ^{
//ref.activityIndicatorView.hidesWhenStopped = NO;
[ref.activityIndicatorView stopAnimating];
ref.activityIndicatorView.hidesWhenStopped = YES;
});
}
];
}
@end