AVUtilities.m
#import "AVUtilities.h"
#import <AVFoundation/AVFoundation.h>
@implementation AVUtilities
+ (AVAsset *)assetByReversingAsset:(AVAsset *)asset outputURL:(NSURL *)outputURL {
    NSError *error;

    // Initialize the reader for the asset's video track
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] lastObject];
    NSDictionary *readerOutputSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey :
                                                @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
    AVAssetReaderTrackOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                                                        outputSettings:readerOutputSettings];
    [reader addOutput:readerOutput];
    [reader startReading];
    // Read all of the video samples into memory
    NSMutableArray *samples = [[NSMutableArray alloc] init];
    CMSampleBufferRef sample;
    while ((sample = [readerOutput copyNextSampleBuffer])) {
        // The array retains the buffer, so balance the +1 retain from the copy
        [samples addObject:(__bridge id)sample];
        CFRelease(sample);
    }
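    // Note: every frame of the source is held in memory at once here, so this
    // approach is only suitable for short clips; longer assets would likely
    // need to be reversed in fixed-size chunks of frames instead.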

    if (samples.count == 0) {
        return nil;
    }

    // Initialize the writer
    AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    NSDictionary *videoCompressionProps = @{ AVVideoAverageBitRateKey : @(videoTrack.estimatedDataRate) };
    NSDictionary *writerOutputSettings = @{ AVVideoCodecKey : AVVideoCodecH264,
                                            AVVideoWidthKey : @((int)videoTrack.naturalSize.width),
                                            AVVideoHeightKey : @((int)videoTrack.naturalSize.height),
                                            AVVideoCompressionPropertiesKey : videoCompressionProps };
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                                     outputSettings:writerOutputSettings
                                                                   sourceFormatHint:(__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions lastObject]];
    [writerInput setExpectsMediaDataInRealTime:NO];

    // Initialize an input adaptor so that we can append pixel buffers directly
    AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor =
        [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput
                                                   sourcePixelBufferAttributes:nil];
    [writer addInput:writerInput];
    [writer startWriting];
    // Start the session at the first frame's original timestamp
    [writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[0])];
    // Append the frames to the output.
    // Note that the image buffers are taken from the tail end of the array while
    // the presentation timestamps are reused from the front; pairing them this
    // way is what reverses the playback order.
    for (NSInteger i = 0; i < samples.count; i++) {
        // Presentation time for the frame at this position in the timeline
        CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[i]);
        // Image/pixel buffer from the mirrored position at the tail of the array
        CVPixelBufferRef imageBufferRef = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[samples.count - i - 1]);
        // Back off until the writer input can accept more data
        while (!writerInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.1];
        }
        [pixelBufferAdaptor appendPixelBuffer:imageBufferRef withPresentationTime:presentationTime];
    }

    // Finish up: mark the input as done, then block until the file is written
    // so the returned asset is backed by a complete movie file
    // (-finishWriting is deprecated, so wait on the asynchronous variant).
    [writerInput markAsFinished];
    dispatch_semaphore_t writingDone = dispatch_semaphore_create(0);
    [writer finishWritingWithCompletionHandler:^{
        dispatch_semaphore_signal(writingDone);
    }];
    dispatch_semaphore_wait(writingDone, DISPATCH_TIME_FOREVER);

    return [AVAsset assetWithURL:outputURL];
}

@end
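
For reference, a minimal sketch of how this helper might be called; the file paths and the player hookup below are assumptions for illustration, not part of this file:

// Hypothetical caller: reverse a local clip and play the result.
// Run this off the main thread, since assetByReversingAsset:outputURL:
// reads and writes the whole file synchronously.
NSURL *inputURL  = [NSURL fileURLWithPath:@"/tmp/input.mp4"];    // assumed path
NSURL *outputURL = [NSURL fileURLWithPath:@"/tmp/reversed.mp4"]; // assumed path

AVAsset *source   = [AVAsset assetWithURL:inputURL];
AVAsset *reversed = [AVUtilities assetByReversingAsset:source outputURL:outputURL];

// The returned asset is backed by the file at outputURL and can be played back:
AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:reversed];
AVPlayer *player   = [AVPlayer playerWithPlayerItem:item];
[player play];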