Reputation: 29
I am using the library "OWVideoProcessor" to cut parts of a live recording video. The video plays fine on any Apple device, but when I play it in a browser (Dropbox) it has some extra seconds added at the front, and the audio is missing from those added seconds. You can see an example video here: https://www.dropbox.com/s/2vyhqlfgfh6gzlk/file32167%281%29.mp4?dl=0 If you download the video on an Apple device it is 20 seconds long; if you play it in the browser it is 29 seconds.
This is the code for stitching the video:
/// Stitches the recorded segment files into a single MP4 at destinationPath,
/// optionally trimming to the trailing outputSegmentDuration seconds, and
/// exports asynchronously with a passthrough preset (no re-encode).
/// @param destinationPath File-system path for the exported MP4.
/// @param completion Called on export completion with the export error, or nil
///        on success / deliberate cancellation. May be nil.
- (void)stitchVideoWithDestinationPath:(NSString *)destinationPath completion:(void(^)(NSError *error))completion {
    // Cancel any in-flight export before starting a new one.
    [self.exportSession cancelExport];
    NSLog(@"export started to path: %@", destinationPath);

    // Build a composition by appending each segment's tracks back to back.
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime startTime = kCMTimeZero;
    int lastIndex = self.segmentStart + self.segmentCount - 1;
    NSLog(@"Stitching segments in interval: [%d - %d]", self.segmentStart, lastIndex);

    // FIX: iterate the interval that is actually logged above. The original
    // loop started at (segmentCount - 5) and used `< lastIndex`, which both
    // ignored segmentStart and silently dropped the final segment.
    for (int i = self.segmentStart; i <= lastIndex; i++) {
        CMTimeShow(startTime);
        NSURL *url = [OWUtilities urlForRecordingSegmentCount:i basePath:self.basePath];
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:@{AVURLAssetPreferPreciseDurationAndTimingKey: @YES}];
        NSAssert(asset, @"Invalid asset at: %@", url);

        // Skip segments missing either their audio or video track; inserting
        // only one of the two would push the tracks out of sync.
        BOOL hasAllTracks = [[asset tracks] count] >= 2;
        if (!hasAllTracks) {
            continue;
        }

        CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
        NSError *insertError = nil;
        // FIX: check the BOOL result and surface insertion failures instead of
        // passing error:nil and discarding them.
        AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        if (![videoTrack insertTimeRange:timeRange ofTrack:track atTime:startTime error:&insertError]) {
            NSLog(@"Failed to insert video for segment %d: %@", i, insertError);
        }
        track = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        if (![audioTrack insertTimeRange:timeRange ofTrack:track atTime:startTime error:&insertError]) {
            NSLog(@"Failed to insert audio for segment %d: %@", i, insertError);
        }
        startTime = CMTimeAdd(startTime, asset.duration);
    }

    NSTimeInterval segmentsDuration = CMTimeGetSeconds(startTime);
    NSLog(@"Total segments duration: %.2f", segmentsDuration);

    // Passthrough preset: segments are already encoded, so no re-compression.
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];

    // If the requested destination does not exist, fall back to a file of the
    // same name under Documents/uploads/ when one is present.
    // (The original also built an unused `filePathsArray` here — removed.)
    if (![[NSFileManager defaultManager] fileExistsAtPath:destinationPath]) {
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *uploadsDirectory = [[paths firstObject] stringByAppendingPathComponent:@"uploads"];
        NSString *candidatePath = [uploadsDirectory stringByAppendingPathComponent:[destinationPath lastPathComponent]];
        if ([[NSFileManager defaultManager] fileExistsAtPath:candidatePath]) {
            destinationPath = candidatePath;
        }
    }
    exporter.outputURL = [NSURL fileURLWithPath:destinationPath];
    exporter.outputFileType = AVFileTypeMPEG4;

    // When the stitched composition is longer than the requested output
    // length, export only the trailing outputSegmentDuration seconds.
    BOOL trimRange = (segmentsDuration > self.outputSegmentDuration);
    if (trimRange) {
        CMTime duration = CMTimeMakeWithSeconds(self.outputSegmentDuration, startTime.timescale);
        NSTimeInterval startInterval = segmentsDuration - self.outputSegmentDuration;
        CMTime start = CMTimeMakeWithSeconds(startInterval, startTime.timescale);
        exporter.timeRange = CMTimeRangeMake(start, duration);
        NSLog(@"Exporting segment:");
        CMTimeRangeShow(exporter.timeRange);
        NSTimeInterval trimmedDuration = CMTimeGetSeconds(duration);
        NSLog(@"Total segments duration: %.2f", trimmedDuration);
    }

    @weakify(self, exporter);
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        @strongify(self, exporter);
        NSLog(@"error: %@", exporter.error);
        // FIX: the original's else-branch invoked `completion(nil)` without a
        // nil check, crashing when no completion block was supplied. Preserve
        // the intent: report the error, but suppress it for cancellations.
        if (completion) {
            BOOL cancelled = (exporter.status == AVAssetExportSessionStatusCancelled);
            completion(cancelled ? nil : exporter.error);
        }
        if (self.exportSession == exporter) {
            self.exportSession = nil;
        }
    }];
    self.exportSession = exporter;
}
Upvotes: 0
Views: 521
Reputation: 29
The problem was not in the code above. The problem was in this compression-settings dictionary:
// Video compression settings handed to the segment writer (OWVideoProcessor).
// Rewritten with modern dictionary/number literals in place of the legacy
// nil-terminated dictionaryWithObjectsAndKeys: form — same keys, same values.
// NOTE(review): AVVideoMaxKeyFrameIntervalKey of 300 frames allows very long
// gaps between keyframes, which some players cannot seek/start cleanly on.
NSDictionary *videoCompressionSettings = @{
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: @(width),
    AVVideoHeightKey: @(height),
    AVVideoCompressionPropertiesKey: @{
        AVVideoAverageBitRateKey: @(bps),
        AVVideoMaxKeyFrameIntervalKey: @300,
    },
};
This code sets up the video compression settings. The AVVideoAverageBitRateKey was set too low (around 600 kbit/s) and the AVVideoMaxKeyFrameIntervalKey was set too high. So I changed AVVideoMaxKeyFrameIntervalKey to 1 and increased AVVideoAverageBitRateKey to 5000 kbit/s. This solved my issue.
This code was written to decrease the video size. You can change it in the OWVideoProcessor library.
Upvotes: 2